diff --git a/.github/workflows/R_CMD_check_Hades.yaml b/.github/workflows/R_CMD_check_Hades.yaml index 8d5a924c..5887ede7 100644 --- a/.github/workflows/R_CMD_check_Hades.yaml +++ b/.github/workflows/R_CMD_check_Hades.yaml @@ -20,9 +20,6 @@ jobs: fail-fast: false matrix: config: - - {os: windows-latest, r: '4.2.3', rtools: '42', rspm: "https://cloud.r-project.org"} - #- {os: macOS-latest, r: '4.2.3', rtools: '42', rspm: "https://cloud.r-project.org"} - - {os: ubuntu-20.04, r: '4.2.3', rtools: '42', rspm: "https://packagemanager.rstudio.com/cran/__linux__/focal/latest"} - {os: windows-latest, r: 'release', rtools: '', rspm: "https://cloud.r-project.org"} - {os: macOS-latest, r: 'release', rtools: '', rspm: "https://cloud.r-project.org"} - {os: ubuntu-20.04, r: 'release', rtools: '', rspm: "https://packagemanager.rstudio.com/cran/__linux__/focal/latest"} @@ -61,7 +58,6 @@ jobs: - uses: r-lib/actions/setup-r@v2 with: r-version: ${{ matrix.config.r }} - rtools-version: ${{ matrix.config.rtools }} - uses: r-lib/actions/setup-tinytex@v2 @@ -70,11 +66,7 @@ jobs: - name: Install system requirements if: runner.os == 'Linux' run: | - sudo apt-get install -y make - sudo apt-get install -y default-jdk - sudo apt-get install -y libcurl4-openssl-dev - sudo apt-get install -y libssl-dev - sudo apt-get install -y libglpk-dev + sudo apt-get install -y libssh-dev while read -r cmd do eval sudo $cmd diff --git a/DESCRIPTION b/DESCRIPTION index 1fa7e091..dbbcabbb 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -1,8 +1,8 @@ Package: Strategus Type: Package Title: Coordinating and Executing Analytics Using HADES Modules -Version: 0.3.0 -Date: 2023-06-04 +Version: 1.0.0 +Date: 2024-07-10 Authors@R: c( person("Martijn", "Schuemie", email = "schuemie@ohdsi.org", role = c("aut")), person("Anthony", "Sena", email = "sena@ohdsi.org", role = c("aut", "cre")), @@ -17,36 +17,44 @@ BugReports: https://github.com/OHDSI/Strategus/issues Depends: R (>= 4.2.0), CohortGenerator (>= 0.8.0), - 
DatabaseConnector (>= 6.2.3) + DatabaseConnector (>= 6.2.3), + R6 Imports: - targets, - renv (>= 1.0.0), ParallelLogger (>= 3.1.0), dplyr, checkmate, - keyring, rlang, - utils, - R.utils, digest, methods, - tibble, - ResultModelManager (>= 0.3.0), - SqlRender (>= 1.11.0), - semver, - httr2, - jsonlite + ResultModelManager (>= 0.5.8), + SqlRender (>= 1.18.0) Suggests: - testthat (>= 3.0.0), + Characterization, + CirceR, + CohortDiagnostics, + CohortIncidence, + CohortMethod, + Cyclops, + Eunomia, + EvidenceSynthesis, + FeatureExtraction, fs, knitr, + PatientLevelPrediction, rmarkdown, - Eunomia, + RSQLite, + SelfControlledCaseSeries, + testthat (>= 3.0.0), withr Remotes: + ohdsi/Characterization, + ohdsi/CohortDiagnostics, ohdsi/CohortGenerator, + ohdsi/CohortIncidence, + ohdsi/CohortMethod, + ohdsi/PatientLevelPrediction, ohdsi/ResultModelManager, - ohdsi/Eunomia + ohdsi/SelfControlledCaseSeries VignetteBuilder: knitr NeedsCompilation: no RoxygenNote: 7.3.1 diff --git a/NAMESPACE b/NAMESPACE index 373b0586..d78a9edf 100644 --- a/NAMESPACE +++ b/NAMESPACE @@ -1,25 +1,36 @@ # Generated by roxygen2: do not edit by hand +export(CharacterizationModule) +export(CohortDiagnosticsModule) +export(CohortGeneratorModule) +export(CohortIncidenceModule) +export(CohortMethodModule) +export(EvidenceSynthesisModule) +export(PatientLevelPredictionModule) +export(SelfControlledCaseSeriesModule) +export(StrategusModule) +export(addCharacterizationModuleSpecifications) +export(addCohortDiagnosticsModuleSpecifications) +export(addCohortGeneratorModuleSpecifications) +export(addCohortIncidenceModuleSpecifications) +export(addCohortMethodeModuleSpecifications) +export(addEvidenceSynthesisModuleSpecifications) export(addModuleSpecifications) +export(addPatientLevelPredictionModuleSpecifications) +export(addSelfControlledCaseSeriesModuleSpecifications) export(addSharedResources) -export(compareLockFiles) export(createCdmExecutionSettings) export(createEmptyAnalysisSpecificiations) 
-export(createResultDataModels) +export(createResultDataModel) +export(createResultsDataModelSettings) export(createResultsExecutionSettings) -export(ensureAllModulesInstantiated) export(execute) -export(getModuleList) -export(installLatestModule) -export(retrieveConnectionDetails) -export(storeConnectionDetails) -export(syncLockFile) -export(unlockKeyring) -export(validateLockFile) -export(verifyModuleInstallation) +export(getDatabaseIdentifierFilePath) +export(uploadResults) export(zipResults) import(CohortGenerator) import(DatabaseConnector) +import(R6) import(dplyr) importFrom(methods,is) importFrom(rlang,.data) diff --git a/R/DatabaseMetaData.R b/R/DatabaseMetaData.R index b4105ade..a99d528a 100644 --- a/R/DatabaseMetaData.R +++ b/R/DatabaseMetaData.R @@ -21,16 +21,27 @@ # carefully consider serialization and deserialization to JSON, which currently # uses custom functionality in ParallelLogger to maintain object attributes. -createDatabaseMetaData <- function(executionSettings, keyringName = NULL) { - databaseMetaDataFolder <- file.path(executionSettings$resultsFolder, "DatabaseMetaData") +#' Provides the file path to the database identifier file created +#' by Strategus +#' +#' @description +#' This function is used to identify the location of the database identifier +#' created by Strategus when running an analysis specification. This +#' location is important when uploading results since the database identifier +#' may be needed to purge old results for a given database identifier. 
+#' +#' @template resultsFolder +#' @export +getDatabaseIdentifierFilePath <- function(resultsFolder) { + return(file.path(.getDatabaseMetaDataResultsFolder(resultsFolder), "database_meta_data.csv")) +} + +.createDatabaseMetaData <- function(executionSettings, connectionDetails) { + databaseMetaDataFolder <- .getDatabaseMetaDataResultsFolder(executionSettings$resultsFolder) if (!dir.exists(databaseMetaDataFolder)) { dir.create(databaseMetaDataFolder, recursive = TRUE) } - connectionDetails <- retrieveConnectionDetails( - connectionDetailsReference = executionSettings$connectionDetailsReference, - keyringName = keyringName - ) connection <- DatabaseConnector::connect(connectionDetails) on.exit(DatabaseConnector::disconnect(connection)) @@ -54,7 +65,10 @@ createDatabaseMetaData <- function(executionSettings, keyringName = NULL) { } resultsDataModel <- CohortGenerator::readCsv( - file = system.file("databaseMetaDataRdms.csv", package = "Strategus"), + file = system.file( + file.path("csv", "databaseMetaDataRdms.csv"), + package = "Strategus" + ), warnOnCaseMismatch = FALSE ) @@ -132,3 +146,53 @@ createDatabaseMetaData <- function(executionSettings, keyringName = NULL) { ) return(databaseId) } + +.createDatabaseMetadataResultsDataModel <- function(resultsConnectionDetails, + resultsDataModelSettings) { + rdmsFile <- file.path(.getDatabaseMetaDataResultsFolder(resultsDataModelSettings$resultsFolder), "resultsDataModelSpecification.csv") + if (file.exists(rdmsFile)) { + rlang::inform("Creating results data model for database metadata") + connection <- DatabaseConnector::connect(resultsConnectionDetails) + on.exit(DatabaseConnector::disconnect(connection)) + + # Create the SQL from the resultsDataModelSpecification.csv + sql <- ResultModelManager::generateSqlSchema( + csvFilepath = rdmsFile + ) + sql <- SqlRender::render( + sql = sql, + database_schema = resultsDataModelSettings$resultsDatabaseSchema + ) + DatabaseConnector::executeSql(connection = connection, sql = sql) 
+ } else { + warning("DatabaseMetaData not found - skipping table creation") + } +} + +.uploadDatabaseMetadata <- function(resultsConnectionDetails, + resultsDataModelSettings) { + databaseMetaDataResultsFolder <- .getDatabaseMetaDataResultsFolder(resultsDataModelSettings$resultsFolder) + rdmsFile <- file.path(.getDatabaseMetaDataResultsFolder(resultsDataModelSettings$resultsFolder), "resultsDataModelSpecification.csv") + if (file.exists(rdmsFile)) { + rlang::inform("Uploading database metadata") + connection <- DatabaseConnector::connect(resultsConnectionDetails) + on.exit(DatabaseConnector::disconnect(connection)) + + specification <- CohortGenerator::readCsv(file = rdmsFile) + ResultModelManager::uploadResults( + connection = connection, + schema = resultsDataModelSettings$resultsDatabaseSchema, + resultsFolder = databaseMetaDataResultsFolder, + purgeSiteDataBeforeUploading = TRUE, + databaseIdentifierFile = getDatabaseIdentifierFilePath(resultsDataModelSettings$resultsFolder), + specifications = specification + ) + } else { + warning("DatabaseMetaData not found - skipping results upload") + } +} + +.getDatabaseMetaDataResultsFolder <- function(resultsFolder) { + return(file.path(resultsFolder, "DatabaseMetaData")) +} + diff --git a/R/Execution.R b/R/Execution.R index 100d11b4..8da41623 100644 --- a/R/Execution.R +++ b/R/Execution.R @@ -25,14 +25,7 @@ #' #' @template AnalysisSpecifications #' @template executionSettings -#' @param executionScriptFolder Optional: the path to use for storing the execution script. -#' when NULL, this function will use a temporary -#' file location to create the script to execute. -#' @template keyringName -#' @param restart Restart run? Requires `executionScriptFolder` to be specified, and be -#' the same as the `executionScriptFolder` used in the run to restart. -#' -#' @template enforceModuleDependencies +#' @template connectionDetails #' #' @return #' Does not return anything.
Is called for the side-effect of executing the specified @@ -41,25 +34,16 @@ #' @export execute <- function(analysisSpecifications, executionSettings, - executionScriptFolder = NULL, - keyringName = NULL, - restart = FALSE, - enforceModuleDependencies = TRUE) { + connectionDetails) { errorMessages <- checkmate::makeAssertCollection() - keyringList <- keyring::keyring_list() checkmate::assertClass(analysisSpecifications, "AnalysisSpecifications", add = errorMessages) checkmate::assertClass(executionSettings, "ExecutionSettings", add = errorMessages) - checkmate::assertChoice(x = keyringName, choices = keyringList$keyring, null.ok = TRUE, add = errorMessages) checkmate::reportAssertions(collection = errorMessages) # Validate the execution settings if (is(executionSettings, "CdmExecutionSettings")) { # Assert that the temp emulation schema is set if required for the dbms # specified by the executionSettings - connectionDetails <- retrieveConnectionDetails( - connectionDetailsReference = executionSettings$connectionDetailsReference, - keyringName = keyringName - ) DatabaseConnector::assertTempEmulationSchemaSet( dbms = connectionDetails$dbms, tempEmulationSchema = executionSettings$tempEmulationSchema @@ -87,7 +71,6 @@ execute <- function(analysisSpecifications, return(TRUE) }, warning = function(w) { - warning(w) return(FALSE) } ) @@ -101,149 +84,47 @@ execute <- function(analysisSpecifications, } } - # Validate the modules - modules <- ensureAllModulesInstantiated( - analysisSpecifications = analysisSpecifications, - enforceModuleDependencies = enforceModuleDependencies - ) - if (isFALSE(modules$allModulesInstalled)) { - stop("Stopping execution due to module issues") - } - - if (is.null(executionScriptFolder)) { - executionScriptFolder <- tempfile("strategusTempSettings") - dir.create(executionScriptFolder) - on.exit(unlink(executionScriptFolder, recursive = TRUE)) - } else if (!restart) { - if (dir.exists(executionScriptFolder)) { - unlink(executionScriptFolder, 
recursive = TRUE) - } - dir.create(executionScriptFolder, recursive = TRUE) + # Set up logging + if (!dir.exists(dirname(executionSettings$logFileName))) { + dir.create(dirname(executionSettings$logFileName), recursive = T) } - # Normalize path to convert from relative to absolute path - executionScriptFolder <- normalizePath(executionScriptFolder, mustWork = F) + ParallelLogger::addDefaultFileLogger( + name = "STRATEGUS_LOGGER", + fileName = executionSettings$logFileName + ) + on.exit(ParallelLogger::unregisterLogger("STRATEGUS_LOGGER")) if (is(executionSettings, "CdmExecutionSettings")) { - executionSettings$databaseId <- createDatabaseMetaData( + executionSettings$databaseId <- .createDatabaseMetaData( executionSettings = executionSettings, - keyringName = keyringName + connectionDetails = connectionDetails ) } - dependencies <- extractDependencies(modules$modules) - - - fileName <- generateTargetsScript( - analysisSpecifications = analysisSpecifications, - executionSettings = executionSettings, - dependencies = dependencies, - executionScriptFolder = executionScriptFolder, - restart = restart, - keyringName = keyringName - ) - # targets::tar_manifest(script = fileName) - # targets::tar_glimpse(script = fileName) - targets::tar_make(script = fileName, store = file.path(executionScriptFolder, "_targets")) -} - -generateTargetsScript <- function(analysisSpecifications, executionSettings, dependencies, executionScriptFolder, keyringName, restart) { - fileName <- file.path(executionScriptFolder, "script.R") - if (restart) { - return(fileName) - } - - ### Note anything inisde this block will be scoped inside the targets script file - targets::tar_script( - { - ## - # Generated by Strategus - not advisable to edit by hand - ## - analysisSpecificationsLoad <- readRDS(analysisSpecificationsFileName) - moduleToTargetNames <- readRDS(moduleToTargetNamesFileName) - dependencies <- readRDS(dependenciesFileName) - targets::tar_option_set(packages = c("Strategus", "keyring"), 
imports = c("Strategus", "keyring")) - targetList <- list( - targets::tar_target(analysisSpecifications, readRDS(analysisSpecificationsFileName)), - # NOTE Execution settings could be mapped to many different cdms making re-execution across cdms much simpler - targets::tar_target(executionSettings, readRDS(executionSettingsFileName)), - targets::tar_target(keyringSettings, readRDS(keyringSettingsFileName)) + # Execute the cohort generator module first if it exists + for (i in 1:length(analysisSpecifications$moduleSpecifications)) { + moduleName <- analysisSpecifications$moduleSpecifications[[i]]$module + if (tolower(moduleName) == "cohortgeneratormodule") { + cg <- CohortGeneratorModule$new() + cg$execute( + connectionDetails = connectionDetails, + analysisSpecifications = analysisSpecifications, + executionSettings = executionSettings ) + break; + } + } - # factory for producing module targets based on their dependencies - # This could be inside Strategus as an exported function - # it would also be much cleaner to use a targets pattern = cross(analysisSpecifications$moduleSpecifications) - # however, working out how to handle dependencies wasn't obvious - # This approach could be modified to allow multiple executionSettings, but that would require a substantial re-write - for (i in 1:length(analysisSpecificationsLoad$moduleSpecifications)) { - moduleSpecification <- analysisSpecificationsLoad$moduleSpecifications[[i]] - targetName <- sprintf("%s_%d", moduleSpecification$module, i) - dependencyModules <- dependencies[dependencies$module == moduleSpecification$module, ]$dependsOn - dependencyTargetNames <- moduleToTargetNames[moduleToTargetNames$module %in% dependencyModules, ]$targetName - - # Use of tar_target_raw allows dynamic names - targetList[[length(targetList) + 1]] <- targets::tar_target_raw(targetName, - substitute(Strategus:::runModule(analysisSpecifications, keyringSettings, i, executionSettings), - env = list(i = i) - ), - deps = 
c("analysisSpecifications", "keyringSettings", "executionSettings", dependencyTargetNames) - ) - - if (execResultsUpload) { - resultsTargetName <- paste0(targetName, "_results_upload") - targetList[[length(targetList) + 1]] <- targets::tar_target_raw(resultsTargetName, - substitute(Strategus:::runResultsUpload(analysisSpecifications, keyringSettings, i, executionSettings), - env = list(i = i) - ), - deps = c("analysisSpecifications", "keyringSettings", "executionSettings", targetName) - ) - } - } - targetList - }, - script = fileName - ) - - # Store settings objects in the temp folder so they are available in targets - analysisSpecificationsFileName <- .formatAndNormalizeFilePathForScript(file.path(executionScriptFolder, "analysisSpecifications.rds")) - saveRDS(analysisSpecifications, analysisSpecificationsFileName) - executionSettingsFileName <- .formatAndNormalizeFilePathForScript(file.path(executionScriptFolder, "executionSettings.rds")) - saveRDS(executionSettings, executionSettingsFileName) - keyringSettingsFileName <- .formatAndNormalizeFilePathForScript(file.path(executionScriptFolder, "keyringSettings.rds")) - saveRDS(list(keyringName = keyringName), keyringSettingsFileName) - - # Generate target names by module type - moduleToTargetNames <- list() + # Execute any other modules for (i in 1:length(analysisSpecifications$moduleSpecifications)) { - moduleSpecification <- analysisSpecifications$moduleSpecifications[[i]] - targetName <- sprintf("%s_%d", moduleSpecification$module, i) - moduleToTargetNames[[length(moduleToTargetNames) + 1]] <- tibble( - module = moduleSpecification$module, - targetName = targetName - ) + moduleName <- analysisSpecifications$moduleSpecifications[[i]]$module + if (tolower(moduleName) != "cohortgeneratormodule") { + moduleObj <- get(moduleName)$new() + moduleObj$execute( + connectionDetails = connectionDetails, + analysisSpecifications = analysisSpecifications, + executionSettings = executionSettings + ) + } } - moduleToTargetNames 
<- bind_rows(moduleToTargetNames) - moduleToTargetNamesFileName <- .formatAndNormalizeFilePathForScript(file.path(executionScriptFolder, "moduleTargetNames.rds")) - saveRDS(moduleToTargetNames, moduleToTargetNamesFileName) - - dependenciesFileName <- .formatAndNormalizeFilePathForScript(file.path(executionScriptFolder, "dependencies.rds")) - saveRDS(dependencies, dependenciesFileName) - - execResultsUpload <- all(c( - is(executionSettings, "CdmExecutionSettings"), - !is.null(executionSettings$resultsConnectionDetailsReference), - !is.null(executionSettings$resultsDatabaseSchema) - )) - - # Settings required inside script. There is probably a much cleaner way of doing this - writeLines(c( - sprintf("analysisSpecificationsFileName <- '%s'", analysisSpecificationsFileName), - sprintf("executionSettingsFileName <- '%s'", executionSettingsFileName), - sprintf("keyringSettingsFileName <- '%s'", keyringSettingsFileName), - sprintf("moduleToTargetNamesFileName <- '%s'", moduleToTargetNamesFileName), - sprintf("dependenciesFileName <- '%s'", dependenciesFileName), - sprintf("execResultsUpload <- '%s'", execResultsUpload), - readLines(fileName) - ), fileName) - - return(fileName) } diff --git a/R/Module-Characterization.R b/R/Module-Characterization.R new file mode 100644 index 00000000..df7325df --- /dev/null +++ b/R/Module-Characterization.R @@ -0,0 +1,210 @@ +# CharacterizationModule ------------- +#' @title Module for generating cohort characterization information +#' @export +#' @description +#' Computes cohort characterization information against the OMOP CDM +#' NOTE: Using v1.0.3 version of module and +#' commit 372fb70c6133bdd8811f8dc1d2a2f9cb9a184345 for the +#' package +CharacterizationModule <- R6::R6Class( + classname = "CharacterizationModule", + inherit = StrategusModule, + public = list( + #' @field tablePrefix The table prefix to append to the results tables + tablePrefix = "c_", + #' @description Initialize the module + initialize = function() { + 
super$initialize() + }, + #' @description Execute characterization + #' @template connectionDetails + #' @template analysisSpecifications + #' @template executionSettings + execute = function(connectionDetails, analysisSpecifications, executionSettings) { + super$execute(connectionDetails, analysisSpecifications, executionSettings) + checkmate::assertClass(executionSettings, "CdmExecutionSettings") + + jobContext <- private$jobContext + workFolder <- jobContext$moduleExecutionSettings$workSubFolder + + Characterization::runCharacterizationAnalyses( + connectionDetails = connectionDetails, + targetDatabaseSchema = jobContext$moduleExecutionSettings$workDatabaseSchema, + targetTable = jobContext$moduleExecutionSettings$cohortTableNames$cohortTable, + outcomeDatabaseSchema = jobContext$moduleExecutionSettings$workDatabaseSchema, + outcomeTable = jobContext$moduleExecutionSettings$cohortTableNames$cohortTable, + cdmDatabaseSchema = jobContext$moduleExecutionSettings$cdmDatabaseSchema, + characterizationSettings = jobContext$settings, + databaseId = jobContext$moduleExecutionSettings$databaseId, + saveDirectory = workFolder, + tablePrefix = self$tablePrefix + ) + + # Export the results + rlang::inform("Export data to csv files") + + sqliteConnectionDetails <- DatabaseConnector::createConnectionDetails( + dbms = "sqlite", + server = file.path(workFolder, "sqliteCharacterization", "sqlite.sqlite") + ) + + # get the result location folder + resultsFolder <- jobContext$moduleExecutionSettings$resultsSubFolder + + Characterization::exportDatabaseToCsv( + connectionDetails = sqliteConnectionDetails, + resultSchema = "main", + tempEmulationSchema = NULL, + tablePrefix = self$tablePrefix, + filePrefix = self$tablePrefix, + saveDirectory = resultsFolder + ) + + # Export the resultsDataModelSpecification.csv + resultsDataModel <- CohortGenerator::readCsv( + file = system.file( + "settings/resultsDataModelSpecification.csv", + package = "Characterization" + ), + warnOnCaseMismatch 
= FALSE + ) + + # add the prefix to the tableName column + resultsDataModel$tableName <- paste0(self$tablePrefix, resultsDataModel$tableName) + + CohortGenerator::writeCsv( + x = resultsDataModel, + file = file.path(resultsFolder, "resultsDataModelSpecification.csv"), + warnOnCaseMismatch = FALSE, + warnOnFileNameCaseMismatch = FALSE, + warnOnUploadRuleViolations = FALSE + ) + + private$.message(paste("Results available at:", resultsFolder)) + }, + #' @description Create the results data model for the module + #' @template resultsConnectionDetails + #' @template resultsDatabaseSchema + #' @template tablePrefix + createResultsDataModel = function(resultsConnectionDetails, resultsDatabaseSchema, tablePrefix = self$tablePrefix) { + super$createResultsDataModel(resultsConnectionDetails, resultsDatabaseSchema, tablePrefix) + resultsDataModel <-private$.getResultsDataModelSpecification() + sql <- ResultModelManager::generateSqlSchema( + schemaDefinition = resultsDataModel + ) + sql <- SqlRender::render( + sql = sql, + database_schema = resultsDatabaseSchema + ) + connection <- DatabaseConnector::connect( + connectionDetails = resultsConnectionDetails + ) + on.exit(DatabaseConnector::disconnect(connection)) + DatabaseConnector::executeSql( + connection = connection, + sql = sql + ) + }, + #' @description Upload the results for the module + #' @template resultsConnectionDetails + #' @template analysisSpecifications + #' @template resultsDataModelSettings + uploadResults = function(resultsConnectionDetails, analysisSpecifications, resultsDataModelSettings) { + super$uploadResults(resultsConnectionDetails, analysisSpecifications, resultsDataModelSettings) + jobContext <- private$jobContext + resultsFolder <- jobContext$moduleExecutionSettings$resultsSubFolder + + specifications <- private$.getResultsDataModelSpecification() + + ResultModelManager::uploadResults( + connectionDetails = resultsConnectionDetails, + schema = resultsDataModelSettings$resultsDatabaseSchema, + 
resultsFolder = resultsFolder, + purgeSiteDataBeforeUploading = FALSE, + specifications = specifications + ) + }, + #' @description Creates the CharacterizationModule Specifications + #' @param targetIds A vector of cohort IDs to use as the target(s) for the characterization + #' @param outcomeIds A vector of cohort IDs to use as the outcome(s) for the characterization + #' @param dechallengeStopInterval description + #' @param dechallengeEvaluationWindow description + #' @param timeAtRisk description + #' @param minPriorObservation description + #' @param minCharacterizationMean description + #' @param covariateSettings description + createModuleSpecifications = function(targetIds, + outcomeIds, + dechallengeStopInterval = 30, + dechallengeEvaluationWindow = 30, + timeAtRisk = data.frame( + riskWindowStart = c(1, 1), + startAnchor = c("cohort start", "cohort start"), + riskWindowEnd = c(0, 365), + endAnchor = c("cohort end", "cohort end") + ), + minPriorObservation = 0, + minCharacterizationMean = 0, + covariateSettings = FeatureExtraction::createDefaultCovariateSettings()) { + # input checks + if (!inherits(timeAtRisk, "data.frame")) { + stop("timeAtRisk must be a data.frame") + } + if (nrow(timeAtRisk) == 0) { + stop("timeAtRisk must be a non-empty data.frame") + } + + timeToEventSettings <- Characterization::createTimeToEventSettings( + targetIds = targetIds, + outcomeIds = outcomeIds + ) + + dechallengeRechallengeSettings <- Characterization::createDechallengeRechallengeSettings( + targetIds = targetIds, + outcomeIds = outcomeIds, + dechallengeStopInterval = dechallengeStopInterval, + dechallengeEvaluationWindow = dechallengeEvaluationWindow + ) + + aggregateCovariateSettings <- lapply( + X = 1:nrow(timeAtRisk), + FUN = function(i) { + Characterization::createAggregateCovariateSettings( + targetIds = targetIds, + outcomeIds = outcomeIds, + minPriorObservation = minPriorObservation, + riskWindowStart = timeAtRisk$riskWindowStart[i], + startAnchor = 
timeAtRisk$startAnchor[i], + riskWindowEnd = timeAtRisk$riskWindowEnd[i], + endAnchor = timeAtRisk$endAnchor[i], + covariateSettings = covariateSettings, + minCharacterizationMean = minCharacterizationMean + ) + } + ) + + analysis <- Characterization::createCharacterizationSettings( + timeToEventSettings = list(timeToEventSettings), + dechallengeRechallengeSettings = list(dechallengeRechallengeSettings), + aggregateCovariateSettings = aggregateCovariateSettings + ) + + specifications <- super$createModuleSpecifications( + moduleSpecifications = analysis + ) + return(specifications) + } + ), + private = list( + .getResultsDataModelSpecification = function(tablePrefix = self$tablePrefix) { + rdms <- CohortGenerator::readCsv( + file = system.file( + "settings/resultsDataModelSpecification.csv", + package = "Characterization" + ) + ) + rdms$tableName <- paste0(tablePrefix, rdms$tableName) + return(rdms) + } + ) +) diff --git a/R/Module-CohortDiagnostics.R b/R/Module-CohortDiagnostics.R new file mode 100644 index 00000000..fcf593a1 --- /dev/null +++ b/R/Module-CohortDiagnostics.R @@ -0,0 +1,214 @@ +# CohortDiagnosticsModule ------------- +#' @title Module for the development and evaluation of phenotype algorithms +#' @export +#' @description +#' Module for the development and evaluation of phenotype algorithms +#' against the OMOP Common Data Model. 
+CohortDiagnosticsModule <- R6::R6Class( + classname = "CohortDiagnosticsModule", + inherit = StrategusModule, + public = list( + #' @field tablePrefix The table prefix to append to results tables + tablePrefix = "cd_", + #' @description Initialize the module + initialize = function() { + super$initialize() + }, + #' @description Executes the CohortDiagnostics package + #' @template connectionDetails + #' @template analysisSpecifications + #' @template executionSettings + execute = function(connectionDetails, analysisSpecifications, executionSettings) { + super$execute(connectionDetails, analysisSpecifications, executionSettings) + checkmate::assertClass(executionSettings, "CdmExecutionSettings") + + jobContext <- private$jobContext + cohortDefinitionSet <- super$.createCohortDefinitionSetFromJobContext() + + exportFolder <- jobContext$moduleExecutionSettings$resultsSubFolder + args <- jobContext$settings + args$cohortDefinitionSet <- cohortDefinitionSet + args$exportFolder <- exportFolder + args$databaseId <- jobContext$moduleExecutionSettings$databaseId + args$connectionDetails <- connectionDetails + args$cdmDatabaseSchema <- jobContext$moduleExecutionSettings$cdmDatabaseSchema + args$cohortDatabaseSchema <- jobContext$moduleExecutionSettings$workDatabaseSchema + args$cohortTableNames <- jobContext$moduleExecutionSettings$cohortTableNames + args$incrementalFolder <- jobContext$moduleExecutionSettings$workSubFolder + args$minCellCount <- jobContext$moduleExecutionSettings$minCellCount + args$cohortIds <- jobContext$moduleExecutionSettings$cohortIds + do.call(CohortDiagnostics::executeDiagnostics, args) + + # TODO: Removing this to make the upload easier + # unlink(file.path(exportFolder, sprintf("Results_%s.zip", jobContext$moduleExecutionSettings$databaseId))) + + resultsDataModel <- CohortGenerator::readCsv( + file = system.file("settings", "resultsDataModelSpecification.csv", package = "CohortDiagnostics"), + warnOnCaseMismatch = FALSE + ) + resultsDataModel <- 
resultsDataModel[file.exists(file.path(exportFolder, paste0(resultsDataModel$tableName, ".csv"))), ] + newTableNames <- paste0(self$tablePrefix, resultsDataModel$tableName) + file.rename( + file.path(exportFolder, paste0(unique(resultsDataModel$tableName), ".csv")), + file.path(exportFolder, paste0(unique(newTableNames), ".csv")) + ) + resultsDataModel$tableName <- newTableNames + CohortGenerator::writeCsv( + x = resultsDataModel, + file.path(exportFolder, "resultsDataModelSpecification.csv"), + warnOnFileNameCaseMismatch = FALSE + ) + + private$.message(paste("Results available at:", exportFolder)) + }, + #' @description Create the results data model for the module + #' @template resultsConnectionDetails + #' @template resultsDatabaseSchema + #' @template tablePrefix + createResultsDataModel = function(resultsConnectionDetails, resultsDatabaseSchema, tablePrefix = self$tablePrefix) { + super$createResultsDataModel(resultsConnectionDetails, resultsDatabaseSchema, tablePrefix) + CohortDiagnostics::createResultsDataModel( + connectionDetails = resultsConnectionDetails, + databaseSchema = resultsDatabaseSchema, + tablePrefix = tablePrefix + ) + }, + #' @description Upload the results for the module + #' @template resultsConnectionDetails + #' @template analysisSpecifications + #' @template resultsDataModelSettings + uploadResults = function(resultsConnectionDetails, analysisSpecifications, resultsDataModelSettings) { + super$uploadResults(resultsConnectionDetails, analysisSpecifications, resultsDataModelSettings) + + # TODO: This is something CD does differently. 
+ # Find the results zip file in the results sub folder + resultsFolder <- private$jobContext$moduleExecutionSettings$resultsSubFolder + zipFiles <- list.files( + path = resultsFolder, + pattern = "\\.zip$", + full.names = TRUE + ) + + if (length(zipFiles) > 0) { + zipFileName <- zipFiles[1] + } else { + # Create a zip file from the results in the directory + DatabaseConnector::createZipFile( + zipFile = "results.zip", + files = list.files(resultsFolder, pattern = ".*\\.csv$"), + rootFolder = resultsFolder + ) + zipFileName <- file.path(resultsFolder, "results.zip") + } + + CohortDiagnostics::uploadResults( + connectionDetails = resultsConnectionDetails, + schema = resultsDataModelSettings$resultsDatabaseSchema, + tablePrefix = self$tablePrefix, + zipFileName = zipFileName, + purgeSiteDataBeforeUploading = FALSE + ) + }, + #' @description Creates the CohortDiagnostics Module Specifications + #' + #' + #' @param cohortIds A list of cohort IDs to use when running the CohortDiagnostics. + #' Default is NULL which will use all cohorts present in the cohort definition set + #' in the analysis specification + #' @param runInclusionStatistics Generate and export statistic on the cohort inclusion rules? + #' @param runIncludedSourceConcepts Generate and export the source concepts included in the cohorts? + #' @param runOrphanConcepts Generate and export potential orphan concepts? + #' @param runTimeSeries Generate and export the time series diagnostics? + #' @param runVisitContext Generate and export index-date visit context? + #' @param runBreakdownIndexEvents Generate and export the breakdown of index events? + #' @param runIncidenceRate Generate and export the cohort incidence rates? + #' @param runCohortRelationship Generate and export the cohort relationship? Cohort relationship checks the temporal + #' relationship between two or more cohorts. + #' @param runTemporalCohortCharacterization Generate and export the temporal cohort characterization? 
+ #' Only records with values greater than 0.001 are returned. + #' @param temporalCovariateSettings Either an object of type \code{covariateSettings} as created using one of + #' the createTemporalCovariateSettings function in the FeatureExtraction package, or a list + #' of such objects. + #' @param minCharacterizationMean The minimum mean value for characterization output. Values below this will be cut off from output. This + #' will help reduce the file size of the characterization output, but will remove information + #' on covariates that have very low values. The default is 0.01 (i.e. 1 percent) + #' @param irWashoutPeriod Number of days washout to include in calculation of incidence rates - default is 0 + #' @param incremental Create only cohort diagnostics that haven't been created before? + createModuleSpecifications = function(cohortIds = NULL, + runInclusionStatistics = TRUE, + runIncludedSourceConcepts = TRUE, + runOrphanConcepts = TRUE, + runTimeSeries = FALSE, + runVisitContext = TRUE, + runBreakdownIndexEvents = TRUE, + runIncidenceRate = TRUE, + runCohortRelationship = TRUE, + runTemporalCohortCharacterization = TRUE, + temporalCovariateSettings = private$.getDefaultCovariateSettings(), + minCharacterizationMean = 0.01, + irWashoutPeriod = 0, + incremental = FALSE) { + analysis <- list() + for (name in names(formals(self$createModuleSpecifications))) { + analysis[[name]] <- get(name) + } + + specifications <- super$createModuleSpecifications( + moduleSpecifications = analysis + ) + return(specifications) + }, + #' @description Validate the module specifications + #' @param moduleSpecifications The CohortDiagnostics module specifications + validateModuleSpecifications = function(moduleSpecifications) { + super$validateModuleSpecifications( + moduleSpecifications = moduleSpecifications + ) + } + ), + private = list( + .getDefaultCovariateSettings = function() { + covariateSettings <- ' + { + "temporal": true, + "temporalSequence": false, + 
"DemographicsGender": true, + "DemographicsAge": true, + "DemographicsAgeGroup": true, + "DemographicsRace": true, + "DemographicsEthnicity": true, + "DemographicsIndexYear": true, + "DemographicsIndexMonth": true, + "DemographicsPriorObservationTime": true, + "DemographicsPostObservationTime": true, + "DemographicsTimeInCohort": true, + "DemographicsIndexYearMonth": true, + "ConditionOccurrence": true, + "ConditionEraStart": true, + "ConditionEraOverlap": true, + "ConditionEraGroupOverlap": true, + "DrugEraStart": true, + "DrugEraGroupOverlap": true, + "ProcedureOccurrence": true, + "DeviceExposure": true, + "Measurement": true, + "Observation": true, + "CharlsonIndex": true, + "Dcsi": true, + "Chads2": true, + "Chads2Vasc": true, + "temporalStartDays": [-9999, -365, -180, -30, -365, -30, 0, 1, 31, -9999], + "temporalEndDays": [0, 0, 0, 0, -31, -1, 0, 30, 365, 9999], + "includedCovariateConceptIds": [], + "addDescendantsToInclude": false, + "excludedCovariateConceptIds": [], + "addDescendantsToExclude": false, + "includedCovariateIds": [], + "attr_class": "covariateSettings", + "attr_fun": "getDbDefaultCovariateData" + } + ' + ParallelLogger::convertJsonToSettings(covariateSettings) + } + ) +) diff --git a/R/Module-CohortGenerator.R b/R/Module-CohortGenerator.R new file mode 100644 index 00000000..92b81a09 --- /dev/null +++ b/R/Module-CohortGenerator.R @@ -0,0 +1,210 @@ +# CohortGeneratorModule ------------- +#' @title Module for generating cohorts against an OMOP CDM +#' @export +#' @description +#' Generates cohorts against the OMOP CDM +CohortGeneratorModule <- R6::R6Class( + classname = "CohortGeneratorModule", + inherit = StrategusModule, + public = list( + #' @field cohortDefinitionSharedResourcesClassName A constant for the name + #' of the cohort definition shared resources section of the analysis + #' specification + cohortDefinitionSharedResourcesClassName = "CohortDefinitionSharedResources", + #' @field negativeControlOutcomeSharedResourcesClassName A 
constant for the + #' name of the negative control outcome shared resources section of the + #' analysis specification + negativeControlOutcomeSharedResourcesClassName = "NegativeControlOutcomeSharedResources", + #' @description Initialize the module + initialize = function() { + super$initialize() + }, + #' @description Generates the cohorts + #' @template connectionDetails + #' @template analysisSpecifications + #' @template executionSettings + execute = function(connectionDetails, analysisSpecifications, executionSettings) { + super$execute(connectionDetails, analysisSpecifications, executionSettings) + checkmate::assertClass(executionSettings, "CdmExecutionSettings") + + jobContext <- private$jobContext + cohortDefinitionSet <- super$.createCohortDefinitionSetFromJobContext() + negativeControlOutcomeSettings <- private$.createNegativeControlOutcomeSettingsFromJobContext() + resultsFolder <- jobContext$moduleExecutionSettings$resultsSubFolder + if (!dir.exists(resultsFolder)) { + dir.create(resultsFolder, recursive = TRUE) + } + + CohortGenerator::runCohortGeneration( + connectionDetails = connectionDetails, + cdmDatabaseSchema = jobContext$moduleExecutionSettings$cdmDatabaseSchema, + cohortDatabaseSchema = jobContext$moduleExecutionSettings$workDatabaseSchema, + cohortTableNames = jobContext$moduleExecutionSettings$cohortTableNames, + cohortDefinitionSet = cohortDefinitionSet, + negativeControlOutcomeCohortSet = negativeControlOutcomeSettings$cohortSet, + occurrenceType = negativeControlOutcomeSettings$occurrenceType, + detectOnDescendants = negativeControlOutcomeSettings$detectOnDescendants, + outputFolder = resultsFolder, + databaseId = jobContext$moduleExecutionSettings$databaseId, + incremental = jobContext$settings$incremental, + incrementalFolder = jobContext$moduleExecutionSettings$workSubFolder + ) + + private$.message(paste("Results available at:", resultsFolder)) + }, + #' @description Create the results data model for the module + #' @template 
resultsConnectionDetails + #' @template resultsDatabaseSchema + #' @template tablePrefix + createResultsDataModel = function(resultsConnectionDetails, resultsDatabaseSchema, tablePrefix = "") { + super$createResultsDataModel(resultsConnectionDetails, resultsDatabaseSchema, tablePrefix) + CohortGenerator::createResultsDataModel( + connectionDetails = resultsConnectionDetails, + databaseSchema = resultsDatabaseSchema, + tablePrefix = tablePrefix + ) + }, + #' @description Upload the results for the module + #' @template resultsConnectionDetails + #' @template analysisSpecifications + #' @template resultsDataModelSettings + uploadResults = function(resultsConnectionDetails, analysisSpecifications, resultsDataModelSettings) { + super$uploadResults(resultsConnectionDetails, analysisSpecifications, resultsDataModelSettings) + + CohortGenerator::uploadResults( + connectionDetails = resultsConnectionDetails, + schema = resultsDataModelSettings$resultsDatabaseSchema, + resultsFolder = private$jobContext$moduleExecutionSettings$resultsSubFolder, + purgeSiteDataBeforeUploading = FALSE + ) + }, + #' @description Creates the CohortGenerator Module Specifications + #' @param incremental When TRUE, the module will keep track of the cohorts + #' generated so that subsequent runs will skip any previously generated + #' cohorts. + #' @param generateStats When TRUE, the Circe cohort definition SQL will + #' include steps to compute inclusion rule statistics. + createModuleSpecifications = function(incremental = TRUE, + generateStats = TRUE) { + analysis <- list() + for (name in names(formals(self$createModuleSpecifications))) { + analysis[[name]] <- get(name) + } + + specifications <- super$createModuleSpecifications( + moduleSpecifications = analysis + ) + return(specifications) + }, + #' @description Create shared specifications for the cohort definition set + #' @param cohortDefinitionSet The cohort definition set to include in the + #' specification. 
See the CohortGenerator package for details on how to + #' build this object. + createCohortSharedResourceSpecifications = function(cohortDefinitionSet) { + if (!CohortGenerator::isCohortDefinitionSet(cohortDefinitionSet)) { + stop("cohortDefinitionSet is not properly defined") + } + + subsetDefinitions <- CohortGenerator::getSubsetDefinitions(cohortDefinitionSet) + if (length(subsetDefinitions) > 0) { + # Filter the cohort definition set to the "parent" cohorts. + parentCohortDefinitionSet <- cohortDefinitionSet[!cohortDefinitionSet$isSubset, ] + } else { + parentCohortDefinitionSet <- cohortDefinitionSet + } + + sharedResource <- list() + cohortDefinitionSetFiltered <- private$.listafy(parentCohortDefinitionSet) + sharedResource["cohortDefinitions"] <- list(cohortDefinitionSetFiltered) + + if (length(subsetDefinitions)) { + # Subset definitions + subsetDefinitionsJson <- lapply(subsetDefinitions, function(x) { + x$toJSON() + }) + sharedResource["subsetDefs"] <- list(subsetDefinitionsJson) + + # Filter to the subsets + subsetCohortDefinitionSet <- cohortDefinitionSet[cohortDefinitionSet$isSubset, ] + subsetIdMapping <- list() + for (i in 1:nrow(subsetCohortDefinitionSet)) { + idMapping <- list( + cohortId = subsetCohortDefinitionSet$cohortId[i], + subsetId = subsetCohortDefinitionSet$subsetDefinitionId[i], + targetCohortId = subsetCohortDefinitionSet$subsetParent[i] + ) + subsetIdMapping[[i]] <- idMapping + } + sharedResource["cohortSubsets"] <- list(subsetIdMapping) + } + + sharedResource <- super$createSharedResourcesSpecifications( + className = self$cohortDefinitionSharedResourcesClassName, + sharedResourcesSpecifications = sharedResource + ) + return(sharedResource) + }, + #' @description Create shared specifications for the negative control outcomes cohort set + #' @param negativeControlOutcomeCohortSet The negative control outcome cohort + #' definition set defines the concepts to use to construct negative control + #' outcome cohorts. 
See the CohortGenerator package for more details. + #' @param occurrenceType Either "first" or "all" + #' @param detectOnDescendants When TRUE, the concept ID for the negative + #' control will use the `concept_ancestor` table and will detect + #' descendant concepts when constructing the cohort. + createNegativeControlOutcomeCohortSharedResourceSpecifications = function(negativeControlOutcomeCohortSet, + occurrenceType, + detectOnDescendants) { + negativeControlOutcomeCohortSet <- apply(negativeControlOutcomeCohortSet, 1, as.list) + sharedResource <- list( + negativeControlOutcomes = list( + negativeControlOutcomeCohortSet = negativeControlOutcomeCohortSet, + occurrenceType = occurrenceType, + detectOnDescendants = detectOnDescendants + ) + ) + sharedResource <- super$createSharedResourcesSpecifications( + className = self$negativeControlOutcomeSharedResourcesClassName, + sharedResourcesSpecifications = sharedResource + ) + return(sharedResource) + }, + #' @description Validate the module specifications + #' @param moduleSpecifications The CohortGenerator module specifications + validateModuleSpecifications = function(moduleSpecifications) { + super$validateModuleSpecifications( + moduleSpecifications = moduleSpecifications + ) + }, + #' @description Validate the cohort shared resource specifications + #' @param cohortSharedResourceSpecifications The cohort shared resource specifications + validateCohortSharedResourceSpecifications = function(cohortSharedResourceSpecifications) { + super$validateSharedResourcesSpecifications( + className = self$cohortDefinitionSharedResourcesClassName, + sharedResourcesSpecifications = cohortSharedResourceSpecifications + ) + }, + #' @description Validate the negative control outcome cohort shared resource specifications + #' @param negativeControlOutcomeCohortSharedResourceSpecifications The negative control outcome cohort shared resource specifications + validateNegativeControlOutcomeCohortSharedResourceSpecifications = 
function(negativeControlOutcomeCohortSharedResourceSpecifications) { + super$validateSharedResourcesSpecifications( + className = self$negativeControlOutcomeSharedResourcesClassName, + sharedResourcesSpecifications = negativeControlOutcomeCohortSharedResourceSpecifications + ) + } + ), + private = list( + .listafy = function(df) { + mylist <- list() + for (i in 1:nrow(df)) { + cohortData <- list( + cohortId = df$cohortId[i], + cohortName = df$cohortName[i], + cohortDefinition = df$json[i] + ) + mylist[[i]] <- cohortData + } + return(mylist) + } + ) +) diff --git a/R/Module-CohortIncidence.R b/R/Module-CohortIncidence.R new file mode 100644 index 00000000..0da88459 --- /dev/null +++ b/R/Module-CohortIncidence.R @@ -0,0 +1,214 @@ +# CohortIncidenceModule ------------- +#' @title Module for computing incidence rates for cohorts against an OMOP CDM +#' @export +#' @description +#' Computes incidence rates for cohorts against the OMOP CDM +CohortIncidenceModule <- R6::R6Class( + classname = "CohortIncidenceModule", + inherit = StrategusModule, + public = list( + #' @field tablePrefix The table prefix to append to results tables + tablePrefix = "ci_", + #' @description Initialize the module + initialize = function() { + super$initialize() + }, + #' @description Execute the CohortIncidence package + #' @template connectionDetails + #' @template analysisSpecifications + #' @template executionSettings + execute = function(connectionDetails, analysisSpecifications, executionSettings) { + super$execute(connectionDetails, analysisSpecifications, executionSettings) + checkmate::assertClass(executionSettings, "CdmExecutionSettings") + + refId <- 1 # this should be part of execution settings + resultsFolder <- private$jobContext$moduleExecutionSettings$resultsSubFolder + + # Establish the connection and ensure the cleanup is performed + connection <- DatabaseConnector::connect(connectionDetails) + on.exit(DatabaseConnector::disconnect(connection)) + + # extract CohortIncidence 
design from jobContext + irDesign <- CohortIncidence::IncidenceDesign$new(private$jobContext$settings$irDesign) + irDesignJSON <- as.character(irDesign$asJSON()) + + # construct buildOptions from executionSettings + # Questions: + # Will there be a subgroup cohort table? + # Are we pulling the source name from the right place? + + buildOptions <- CohortIncidence::buildOptions( + cohortTable = paste0(private$jobContext$moduleExecutionSettings$workDatabaseSchema, ".", private$jobContext$moduleExecutionSettings$cohortTableNames$cohortTable), + cdmDatabaseSchema = private$jobContext$moduleExecutionSettings$cdmDatabaseSchema, + sourceName = as.character(private$jobContext$moduleExecutionSettings$databaseId), + refId = refId + ) + + executeResults <- CohortIncidence::executeAnalysis( + connection = connection, + incidenceDesign = irDesignJSON, + buildOptions = buildOptions + ) + + # Export the results + exportFolder <- private$jobContext$moduleExecutionSettings$resultsSubFolder + if (!dir.exists(exportFolder)) { + dir.create(exportFolder, recursive = TRUE) + } + + private$.message("Export data") + + # apply minCellCount to executeResults + minCellCount <- private$jobContext$moduleExecutionSettings$minCellCount + if (minCellCount > 0) { + executeResults$incidence_summary <- private$.enforceMinCellValue(executeResults$incidence_summary, "PERSONS_AT_RISK_PE", minCellCount) + executeResults$incidence_summary <- private$.enforceMinCellValue(executeResults$incidence_summary, "PERSONS_AT_RISK", minCellCount) + executeResults$incidence_summary <- private$.enforceMinCellValue(executeResults$incidence_summary, "PERSON_OUTCOMES_PE", minCellCount) + executeResults$incidence_summary <- private$.enforceMinCellValue(executeResults$incidence_summary, "PERSON_OUTCOMES", minCellCount) + executeResults$incidence_summary <- private$.enforceMinCellValue(executeResults$incidence_summary, "OUTCOMES_PE", minCellCount) + executeResults$incidence_summary <- 
private$.enforceMinCellValue(executeResults$incidence_summary, "OUTCOMES", minCellCount) + executeResults$incidence_summary <- private$.enforceMinCellStats(executeResults$incidence_summary) + } + + for (tableName in names(executeResults)) { + tableData <- executeResults[[tableName]] + if (tableName == 'incidence_summary') { + if (nrow(tableData) > 0) { + tableData$database_id <- private$jobContext$moduleExecutionSettings$databaseId + } else { + tableData$database_id <- character(0) + } + } + readr::write_csv(tableData, file.path(exportFolder, paste0(self$tablePrefix,tableName,".csv"))) + } + + # in addition to the output of the module, we will produce a T-O lookup table that can be used to filter results + # to either 'Outcomes for T' or 'Targets for Outcomes' + + targetOutcomeDfList <- lapply(irDesign$analysisList, function(analysis) { + outcomeDefs <- Filter(function (o) o$id %in% analysis$outcomes, irDesign$outcomeDefs) + outcome_cohort_id <- sapply(outcomeDefs, function(o) o$cohortId) + as.data.frame(expand.grid(target_cohort_id = analysis$targets, outcome_cohort_id = outcome_cohort_id)) + }) + + target_outcome_ref <- unique(do.call(rbind, targetOutcomeDfList)) + target_outcome_ref$ref_id <- refId + readr::write_csv(target_outcome_ref, file.path(exportFolder, paste0(self$tablePrefix,"target_outcome_ref",".csv"))) + + resultsDataModel <- private$.getResultsDataModelSpecification() + readr::write_csv(resultsDataModel, file.path(exportFolder, "resultsDataModelSpecification.csv")) + + private$.message(paste("Results available at:", resultsFolder)) + }, + #' @description Create the results data model for the module + #' @template resultsConnectionDetails + #' @template resultsDatabaseSchema + #' @template tablePrefix + createResultsDataModel = function(resultsConnectionDetails, resultsDatabaseSchema, tablePrefix = "") { + super$createResultsDataModel(resultsConnectionDetails, resultsDatabaseSchema, tablePrefix) + if (resultsConnectionDetails$dbms == "sqlite" & 
resultsDatabaseSchema != "main") { + stop("Invalid schema for sqlite, use databaseSchema = 'main'") + } + + connection <- DatabaseConnector::connect(resultsConnectionDetails) + on.exit(DatabaseConnector::disconnect(connection)) + + # Create the results model + sql <- ResultModelManager::generateSqlSchema(schemaDefinition = private$.getResultsDataModelSpecification()) + sql <- SqlRender::render(sql= sql, warnOnMissingParameters = TRUE, database_schema = resultsDatabaseSchema) + sql <- SqlRender::translate(sql = sql, targetDialect = resultsConnectionDetails$dbms) + DatabaseConnector::executeSql(connection, sql) + }, + #' @description Upload the results for the module + #' @template resultsConnectionDetails + #' @template analysisSpecifications + #' @template resultsDataModelSettings + uploadResults = function(resultsConnectionDetails, analysisSpecifications, resultsDataModelSettings) { + super$uploadResults(resultsConnectionDetails, analysisSpecifications, resultsDataModelSettings) + resultsFolder <- private$jobContext$moduleExecutionSettings$resultsSubFolder + exportFolder <- private$jobContext$moduleExecutionSettings$resultsSubFolder + + # use the results model spec that was saved along with the results output, not the embedded model spec. 
+ resultsModelSpec <- readr::read_csv( + file = file.path(file.path(exportFolder, "resultsDataModelSpecification.csv")), + show_col_types = FALSE + ) + + ResultModelManager::uploadResults( + connectionDetails = resultsConnectionDetails, + schema = resultsDataModelSettings$resultsDatabaseSchema, + resultsFolder = resultsFolder, + purgeSiteDataBeforeUploading = FALSE, + specifications = resultsModelSpec + ) + }, + #' @description Creates the CohortIncidence Module Specifications + #' @param irDesign The incidence rate design created from the CohortIncidence + #' package + createModuleSpecifications = function(irDesign = NULL) { + analysis <- list() + for (name in names(formals(self$createModuleSpecifications))) { + analysis[[name]] <- get(name) + } + + specifications <- super$createModuleSpecifications( + moduleSpecifications = analysis + ) + return(specifications) + }, + #' @description Validate the module specifications + #' @param moduleSpecifications The CohortIncidence module specifications + validateModuleSpecifications = function(moduleSpecifications) { + super$validateModuleSpecifications( + moduleSpecifications = moduleSpecifications + ) + irDesign <- CohortIncidence::IncidenceDesign$new(moduleSpecifications$settings$irDesign) + designJson <- rJava::J("org.ohdsi.analysis.cohortincidence.design.CohortIncidence")$fromJson(as.character(irDesign$asJSON())) + + invisible(designJson) + } + ), + private = list( + .enforceMinCellValue = function(data, fieldName, minValues, silent = FALSE) { + toCensor <- !is.na(data[, fieldName]) & data[, fieldName] < minValues & data[, fieldName] != 0 + if (!silent) { + percent <- round(100 * sum(toCensor) / nrow(data), 1) + private$.message( + " censoring ", + sum(toCensor), + " values (", + percent, + "%) from ", + fieldName, + " because value below minimum" + ) + } + data[toCensor, fieldName] <- -minValues + return(data) + }, + .enforceMinCellStats = function(data) { + # replace rates with NA for censored outcomes + toCensor <- 
data[, "OUTCOMES"] < 0 + data[toCensor, "INCIDENCE_RATE_P100PY"] <- NA + + # replace proportions with NA for censored person_outcomes + toCensor <- data[, "PERSON_OUTCOMES"] < 0 + data[toCensor, "INCIDENCE_PROPORTION_P100P"] <- NA + + return(data) + }, + .getResultsDataModelSpecification = function() { + rdms <- CohortGenerator::readCsv( + file = private$.getResultsDataModelSpecificationFileLocation() + ) + rdms$tableName <-paste0(self$tablePrefix, rdms$tableName) + return(rdms) + }, + .getResultsDataModelSpecificationFileLocation = function() { + return(system.file( + file.path("csv", "cohortIncidenceRdms.csv"), + package = "Strategus" + )) + } + ) +) diff --git a/R/Module-CohortMethod.R b/R/Module-CohortMethod.R new file mode 100644 index 00000000..0d78b3f8 --- /dev/null +++ b/R/Module-CohortMethod.R @@ -0,0 +1,172 @@ +# CohortMethodModule ------------- +#' @title Module for performing new-user cohort studies +#' @export +#' @description +#' Module for performing new-user cohort studies in an observational +#' database in the OMOP Common Data Model. 
+CohortMethodModule <- R6::R6Class(
+  classname = "CohortMethodModule",
+  inherit = StrategusModule,
+  public = list(
+    #' @description Initialize the module
+    initialize = function() {
+      super$initialize()
+    },
+    #' @description Executes the CohortMethod package
+    #' @template connectionDetails
+    #' @param analysisSpecifications The analysis specifications for the study
+    #' @template executionSettings
+    execute = function(connectionDetails, analysisSpecifications, executionSettings) {
+      super$execute(connectionDetails, analysisSpecifications, executionSettings)
+      # This module runs against the CDM, so only CdmExecutionSettings are valid here
+      checkmate::assertClass(executionSettings, "CdmExecutionSettings")
+
+      jobContext <- private$jobContext
+      # Use all available cores for CohortMethod's multi-threading settings
+      multiThreadingSettings <- CohortMethod::createDefaultMultiThreadingSettings(parallel::detectCores())
+
+      # Assemble runCmAnalyses() arguments from the module settings plus the
+      # execution settings. Exposure and outcome cohorts both live in the
+      # cohort table referenced by the execution settings.
+      args <- jobContext$settings
+      args$connectionDetails <- connectionDetails
+      args$cdmDatabaseSchema <- jobContext$moduleExecutionSettings$cdmDatabaseSchema
+      args$exposureDatabaseSchema <- jobContext$moduleExecutionSettings$workDatabaseSchema
+      args$exposureTable <- jobContext$moduleExecutionSettings$cohortTableNames$cohortTable
+      args$outcomeDatabaseSchema <- jobContext$moduleExecutionSettings$workDatabaseSchema
+      args$outcomeTable <- jobContext$moduleExecutionSettings$cohortTableNames$cohortTable
+      args$outputFolder <- jobContext$moduleExecutionSettings$workSubFolder
+      args$multiThreadingSettings <- multiThreadingSettings
+      # Diagnostic thresholds are applied below at export time, not at run time
+      args$cmDiagnosticThresholds <- NULL
+      do.call(CohortMethod::runCmAnalyses, args)
+
+      # Export the analysis results to CSV, applying the minimum cell count
+      # and the diagnostics thresholds from the module settings
+      exportFolder <- jobContext$moduleExecutionSettings$resultsSubFolder
+      CohortMethod::exportToCsv(
+        outputFolder = jobContext$moduleExecutionSettings$workSubFolder,
+        exportFolder = exportFolder,
+        databaseId = jobContext$moduleExecutionSettings$databaseId,
+        minCellCount = jobContext$moduleExecutionSettings$minCellCount,
+        maxCores = parallel::detectCores(),
+        cmDiagnosticThresholds = jobContext$settings$cmDiagnosticThresholds
+      )
+      # TODO: Removing this to make the upload easier
+      #unlink(file.path(exportFolder, sprintf("Results_%s.zip", jobContext$moduleExecutionSettings$databaseId)))
+
+      # Copy CohortMethod's results data model specification next to the
+      # exported CSVs so uploadResults() can find it later
+      resultsDataModel <- CohortGenerator::readCsv(file = system.file("csv", "resultsDataModelSpecification.csv", package = "CohortMethod"))
+      CohortGenerator::writeCsv(
+        x = resultsDataModel,
+        file = file.path(exportFolder, "resultsDataModelSpecification.csv"),
+        warnOnFileNameCaseMismatch = FALSE
+      )
+      private$.message(paste("Results available at:", exportFolder))
+    },
+    #' @description Create the results data model for the module
+    #' @template resultsConnectionDetails
+    #' @template resultsDatabaseSchema
+    #' @template tablePrefix
+    createResultsDataModel = function(resultsConnectionDetails, resultsDatabaseSchema, tablePrefix = "") {
+      super$createResultsDataModel(resultsConnectionDetails, resultsDatabaseSchema, tablePrefix)
+      # Delegate table creation to CohortMethod, which owns its results schema
+      CohortMethod::createResultsDataModel(
+        connectionDetails = resultsConnectionDetails,
+        databaseSchema = resultsDatabaseSchema,
+        tablePrefix = tablePrefix
+      )
+    },
+    #' @description Upload the results for the module
+    #' @template resultsConnectionDetails
+    #' @template analysisSpecifications
+    #' @template resultsDataModelSettings
+    uploadResults = function(resultsConnectionDetails, analysisSpecifications, resultsDataModelSettings) {
+      super$uploadResults(resultsConnectionDetails, analysisSpecifications, resultsDataModelSettings)
+
+      # TODO: This is something CM does differently.
+      # Find the results zip file in the results sub folder
+      resultsFolder <- private$jobContext$moduleExecutionSettings$resultsSubFolder
+      zipFiles <- list.files(
+        path = resultsFolder,
+        pattern = "\\.zip$",
+        full.names = TRUE
+      )
+
+      if (length(zipFiles) > 0) {
+        # Reuse the first existing zip archive found in the results folder
+        zipFileName <- zipFiles[1]
+      } else {
+        # Create a zip file from the results in the directory
+        # NOTE(review): createZipFile() appears to resolve zipFile relative to
+        # the current working directory, while zipFileName below points inside
+        # resultsFolder — confirm the archive actually lands in resultsFolder.
+        DatabaseConnector::createZipFile(
+          zipFile = "results.zip",
+          files = list.files(resultsFolder, pattern = ".*\\.csv$"),
+          rootFolder = resultsFolder
+        )
+        zipFileName <- file.path(resultsFolder, "results.zip")
+      }
+
+      # TODO: This function does not expose
+      # a way to specify the database identifier file
+      # which makes the purge problematic since I'm
+      # not sure how it will know what to purge...
+      CohortMethod::uploadResults(
+        connectionDetails = resultsConnectionDetails,
+        schema = resultsDataModelSettings$resultsDatabaseSchema,
+        zipFileName = zipFileName,
+        purgeSiteDataBeforeUploading = FALSE
+      )
+    },
+    #' @description Creates the CohortMethod Module Specifications
+    #'
+    #' @details
+    #' Run a list of analyses for the target-comparator-outcomes of interest. This function will run all
+    #' specified analyses against all hypotheses of interest, meaning that the total number of outcome
+    #' models is `length(cmAnalysisList) * length(targetComparatorOutcomesList)` (if all analyses specify an
+    #' outcome model should be fitted). When you provide several analyses it will determine whether any of
+    #' the analyses have anything in common, and will take advantage of this fact. For example, if we
+    #' specify several analyses that only differ in the way the outcome model is fitted, then this
+    #' function will extract the data and fit the propensity model only once, and re-use this in all the
+    #' analysis.
+    #'
+    #' After completion, a tibble containing references to all generated files can be obtained using the
+    #' [CohortMethod::getFileReference()] function. A summary of the analysis results can be obtained using the
+    #' [CohortMethod::getResultsSummary()] function.
+    #'
+    #' ## Analyses to Exclude
+    #'
+    #' Normally, `runCmAnalyses` will run all combinations of target-comparator-outcome-analyses settings.
+    #' However, sometimes we may not need all those combinations. Using the `analysesToExclude` argument,
+    #' we can remove certain items from the full matrix. This argument should be a data frame with at least
+    #' one of the following columns:
+    #'
+    #' @param cmAnalysisList A list of objects of type `cmAnalysis` as created using
+    #'                       the [CohortMethod::createCmAnalysis()] function.
+    #' @param targetComparatorOutcomesList A list of objects of type `targetComparatorOutcomes` as
+    #'                                     created using the [CohortMethod::createTargetComparatorOutcomes]
+    #'                                     function.
+    #' @param analysesToExclude Analyses to exclude. See the Analyses to Exclude section for details.
+    #' @param refitPsForEveryOutcome Should the propensity model be fitted for every outcome (i.e.
+    #'                               after people who already had the outcome are removed)? If
+    #'                               false, a single propensity model will be fitted, and people
+    #'                               who had the outcome previously will be removed afterwards.
+    #' @param refitPsForEveryStudyPopulation Should the propensity model be fitted for every study population
+    #'                                       definition? If false, a single propensity model will be fitted,
+    #'                                       and the study population criteria will be applied afterwards.
+    #' @param cmDiagnosticThresholds An object of type `CmDiagnosticThresholds` as created using
+    #'                               [CohortMethod::createCmDiagnosticThresholds()].
+    #'
+    createModuleSpecifications = function(cmAnalysisList,
+                                          targetComparatorOutcomesList,
+                                          analysesToExclude = NULL,
+                                          refitPsForEveryOutcome = FALSE,
+                                          refitPsForEveryStudyPopulation = TRUE,
+                                          cmDiagnosticThresholds = CohortMethod::createCmDiagnosticThresholds()) {
+      # Capture every formal argument (by name) into the settings list so the
+      # specification mirrors this function's signature exactly
+      analysis <- list()
+      for (name in names(formals(self$createModuleSpecifications))) {
+        analysis[[name]] <- get(name)
+      }
+
+      specifications <- super$createModuleSpecifications(
+        moduleSpecifications = analysis
+      )
+      return(specifications)
+    },
+    #' @description Validate the module specifications
+    #' @param moduleSpecifications The CohortMethod module specifications
+    validateModuleSpecifications = function(moduleSpecifications) {
+      super$validateModuleSpecifications(
+        moduleSpecifications = moduleSpecifications
+      )
+    }
+  )
+)
diff --git a/R/Module-EvidenceSynthesis.R b/R/Module-EvidenceSynthesis.R
new file mode 100644
index 00000000..a4e71453
--- /dev/null
+++ b/R/Module-EvidenceSynthesis.R
@@ -0,0 +1,985 @@
+# EvidenceSynthesisModule -------------
+#' @title Module for combining causal effect estimates and study diagnostics
+#' across multiple data sites in a distributed study. This includes functions
+#' for performing meta-analysis and forest plots
+#' @export
+#' @description
+#' Module for combining causal effect estimates and study diagnostics
+#' across multiple data sites in a distributed study.
This includes functions +#' for performing meta-analysis and forest plots +EvidenceSynthesisModule <- R6::R6Class( + classname = "EvidenceSynthesisModule", + inherit = StrategusModule, + public = list( + #' @description Initialize the module + initialize = function() { + super$initialize() + }, + #' @description Executes the EvidenceSynthesis package + #' @template connectionDetails + #' @template analysisSpecifications + #' @template executionSettings + execute = function(connectionDetails, analysisSpecifications, executionSettings) { + super$execute(connectionDetails, analysisSpecifications, executionSettings) + checkmate::assertClass(executionSettings, "ResultsExecutionSettings") + jobContext <- private$jobContext + + resultsFolder <- jobContext$moduleExecutionSettings$resultsSubFolder + if (!dir.exists(resultsFolder)) { + dir.create( + path = resultsFolder, + recursive = T + ) + } + private$.writeAnalysisSpecs( + analysisSpecs = jobContext$settings$evidenceSynthesisAnalysisList, + resultsFolder = resultsFolder + ) + + private$.executeEvidenceSynthesis( + connectionDetails = connectionDetails, + databaseSchema = jobContext$moduleExecutionSettings$resultsDatabaseSchema, + settings = jobContext$settings$evidenceSynthesisAnalysisList, + esDiagnosticThresholds = jobContext$settings$esDiagnosticThresholds, + resultsFolder = resultsFolder, + minCellCount = jobContext$moduleExecutionSettings$minCellCount + ) + + file.copy( + from = private$.getResultsDataModelSpecificationFileLocation(), + to = file.path(jobContext$moduleExecutionSettings$resultsSubFolder, "resultsDataModelSpecification.csv") + ) + }, + #' @description Create the results data model for the module + #' @template resultsConnectionDetails + #' @template resultsDatabaseSchema + #' @template tablePrefix + createResultsDataModel = function(resultsConnectionDetails, resultsDatabaseSchema, tablePrefix = "") { + super$createResultsDataModel(resultsConnectionDetails, resultsDatabaseSchema, tablePrefix) + if 
(resultsConnectionDetails$dbms == "sqlite" & resultsDatabaseSchema != "main") { + stop("Invalid schema for sqlite, use databaseSchema = 'main'") + } + + connection <- DatabaseConnector::connect(resultsConnectionDetails) + on.exit(DatabaseConnector::disconnect(connection)) + + # Create the results model + sql <- ResultModelManager::generateSqlSchema( + csvFilepath = private$.getResultsDataModelSpecificationFileLocation() + ) + sql <- SqlRender::render(sql= sql, warnOnMissingParameters = TRUE, database_schema = resultsDatabaseSchema) + sql <- SqlRender::translate(sql = sql, targetDialect = resultsConnectionDetails$dbms) + DatabaseConnector::executeSql(connection, sql) + }, + #' @description Upload the results for the module + #' @template resultsConnectionDetails + #' @template analysisSpecifications + #' @template resultsDataModelSettings + uploadResults = function(resultsConnectionDetails, analysisSpecifications, resultsDataModelSettings) { + super$uploadResults(resultsConnectionDetails, analysisSpecifications, resultsDataModelSettings) + jobContext <- private$jobContext + resultsFolder <- jobContext$moduleExecutionSettings$resultsSubFolder + + ResultModelManager::uploadResults( + connectionDetails = resultsConnectionDetails, + schema = resultsDataModelSettings$resultsDatabaseSchema, + resultsFolder = resultsFolder, + purgeSiteDataBeforeUploading = FALSE, # ES is not site specific + specifications = private$.getResultsDataModelSpecification() + ) + }, + #' @description Validate the module specifications + #' @param moduleSpecifications The EvidenceSynthesis module specifications + validateModuleSpecifications = function(moduleSpecifications) { + super$validateModuleSpecifications( + moduleSpecifications = moduleSpecifications + ) + }, + #' Create an evidence synthesis source + #' + #' @param sourceMethod The source method generating the estimates to synthesize. 
Can be "CohortMethod" or + #' "SelfControlledCaseSeries" + #' @param databaseIds The database IDs to include. Use `databaseIds = NULL` to include all database IDs. + #' @param analysisIds The source method analysis IDs to include. Use `analysisIds = NULL` to include all + #' analysis IDs. + #' @param likelihoodApproximation The type of likelihood approximation. Can be "adaptive grid" or "normal". + #' + #' @return + #' An object of type `EvidenceSynthesisSource`. + createEvidenceSynthesisSource = function(sourceMethod = "CohortMethod", + databaseIds = NULL, + analysisIds = NULL, + likelihoodApproximation = "adaptive grid") { + errorMessages <- checkmate::makeAssertCollection() + checkmate::assertChoice(sourceMethod, c("CohortMethod", "SelfControlledCaseSeries"), add = errorMessages) + if (is.character(databaseIds)) { + checkmate::assertCharacter(databaseIds, null.ok = TRUE, add = errorMessages) + } else { + checkmate::assertIntegerish(databaseIds, null.ok = TRUE, add = errorMessages) + } + checkmate::assertIntegerish(analysisIds, null.ok = TRUE, add = errorMessages) + checkmate::assertChoice(likelihoodApproximation, c("adaptive grid", "normal"), add = errorMessages) + checkmate::reportAssertions(collection = errorMessages) + + analysis <- list() + for (name in names(formals(self$createEvidenceSynthesisSource))) { + analysis[[name]] <- get(name) + } + class(analysis) <- "EvidenceSynthesisSource" + return(analysis) + }, + #' Create parameters for a random-effects meta-analysis + #' + #' @details + #' Use DerSimonian-Laird meta-analysis + #' + #' @param alpha The alpha (expected type I error) used for the confidence intervals. 
+ #' @param evidenceSynthesisAnalysisId description + #' @param evidenceSynthesisDescription description + #' @param evidenceSynthesisSource description + #' @param controlType description + createRandomEffectsMetaAnalysis = function(alpha = 0.05, + evidenceSynthesisAnalysisId = 1, + evidenceSynthesisDescription = "Random-effects", + evidenceSynthesisSource = NULL, + controlType = "outcome") { + if (evidenceSynthesisSource$likelihoodApproximation != "normal") { + stop("Random-effects meta-analysis only supports normal approximation of the likelihood.") + } + analysis <- list() + for (name in names(formals(self$createRandomEffectsMetaAnalysis))) { + analysis[[name]] <- get(name) + } + class(analysis) <- c("RandomEffectsMetaAnalysis", "EvidenceSynthesisAnalysis") + return(analysis) + }, + #' Create a parameter object for the function computeFixedEffectMetaAnalysis + #' + #' @details + #' Create an object defining the parameter values. + #' + #' @param alpha The alpha (expected type I error) used for the confidence intervals. + #' @param evidenceSynthesisAnalysisId description + #' @param evidenceSynthesisDescription description + #' @param evidenceSynthesisSource description + #' @param controlType description + createFixedEffectsMetaAnalysis = function(alpha = 0.05, + evidenceSynthesisAnalysisId = 1, + evidenceSynthesisDescription = "Fixed-effects", + evidenceSynthesisSource = NULL, + controlType = "outcome") { + analysis <- list() + for (name in names(formals(self$createFixedEffectsMetaAnalysis))) { + analysis[[name]] <- get(name) + } + class(analysis) <- c("FixedEffectsMetaAnalysis", "EvidenceSynthesisAnalysis") + if (evidenceSynthesisSource$likelihoodApproximation != "normal") + stop("Fixed-effects meta-analysis only supports normal approximation of the likelihood.") + return(analysis) + }, + #' Create a parameter object for the function computeBayesianMetaAnalysis + #' + #' @details + #' Create an object defining the parameter values. 
+ #' + #' @param chainLength Number of MCMC iterations. + #' @param burnIn Number of MCMC iterations to consider as burn in. + #' @param subSampleFrequency Subsample frequency for the MCMC. + #' @param priorSd A two-dimensional vector with the standard deviation of the prior for mu and tau, respectively. + #' @param alpha The alpha (expected type I error) used for the credible intervals. + #' @param robust Whether or not to use a t-distribution model; default: FALSE. + #' @param df Degrees of freedom for the t-model, only used if robust is TRUE. + #' @param seed The seed for the random number generator. + #' @param evidenceSynthesisAnalysisId description + #' @param evidenceSynthesisDescription description + #' @param evidenceSynthesisSource description + #' @param controlType description + createBayesianMetaAnalysis = function(chainLength = 1100000, + burnIn = 1e+05, + subSampleFrequency = 100, + priorSd = c(2, 0.5), + alpha = 0.05, + robust = FALSE, + df = 4, + seed = 1, + evidenceSynthesisAnalysisId = 1, + evidenceSynthesisDescription = "Bayesian random-effects", + evidenceSynthesisSource = NULL, + controlType = "outcome") { + analysis <- list() + for (name in names(formals(self$createBayesianMetaAnalysis))) { + analysis[[name]] <- get(name) + } + class(analysis) <- c("BayesianMetaAnalysis", "EvidenceSynthesisAnalysis") + return(analysis) + }, + #' Create EvidenceSynthesis diagnostics thresholds + #' + #' @description + #' Threshold used to determine if we pass or fail diagnostics. + #' + #' @param mdrrThreshold What is the maximum allowed minimum detectable relative risk + #' (MDRR)? + #' @param easeThreshold What is the maximum allowed expected absolute systematic error + #' (EASE). + #' @param i2Threshold What is the maximum allowed I^2 (measure of between-database + #' heterogeneity in random-effects models)? + #' @param tauThreshold What is the maximum allowed tau (measure of between-database + #' heterogeneity in Bayesian random-effects models)? 
+ #' + #' @return + #' An object of type `EsDiagnosticThresholds`. + createEsDiagnosticThresholds = function(mdrrThreshold = 10, + easeThreshold = 0.25, + i2Threshold = 0.4, + tauThreshold = log(2)) { + errorMessages <- checkmate::makeAssertCollection() + checkmate::assertNumeric(mdrrThreshold, len = 1, lower = 0, add = errorMessages) + checkmate::assertNumeric(easeThreshold, len = 1, lower = 0, add = errorMessages) + checkmate::assertNumeric(i2Threshold, len = 1, lower = 0, add = errorMessages) + checkmate::assertNumeric(tauThreshold, len = 1, lower = 0, add = errorMessages) + checkmate::reportAssertions(collection = errorMessages) + thresholds <- list( + mdrrThreshold = mdrrThreshold, + easeThreshold = easeThreshold, + i2Threshold = i2Threshold, + tauThreshold = tauThreshold + ) + class(thresholds) <- "EsDiagnosticThresholds" + return(thresholds) + }, + #' @description Creates the module Specifications + #' @param evidenceSynthesisAnalysisList A list of objects of type `EvidenceSynthesisAnalysis` as generated + #' by either the \href{../../Strategus/html/EvidenceSynthesisModule.html#method-createFixedEffectsMetaAnalysis}{\code{EvidenceSynthesisModule$createFixedEffectsMetaAnalysis()}} + #' or \href{../../Strategus/html/EvidenceSynthesisModule.html#method-createBayesianMetaAnalysis}{\code{EvidenceSynthesisModule$createBayesianMetaAnalysis()}} function. + #' @param esDiagnosticThresholds An object of type`EsDiagnosticThresholds` as generated by the + #' \href{../../Strategus/html/EvidenceSynthesisModule.html#method-createEsDiagnosticThresholds}{\code{EvidenceSynthesisModule$createEsDiagnosticThresholds()}} function. 
+ createModuleSpecifications = function(evidenceSynthesisAnalysisList, + esDiagnosticThresholds = self$createEsDiagnosticThresholds()) { + errorMessages <- checkmate::makeAssertCollection() + checkmate::assertList(evidenceSynthesisAnalysisList, min.len = 1, add = errorMessages) + for (i in 1:length(evidenceSynthesisAnalysisList)) { + checkmate::assertClass(evidenceSynthesisAnalysisList[[i]], "EvidenceSynthesisAnalysis", add = errorMessages) + } + checkmate::assertClass(esDiagnosticThresholds, "EsDiagnosticThresholds", add = errorMessages) + checkmate::reportAssertions(collection = errorMessages) + + analysis <- list() + for (name in names(formals(self$createModuleSpecifications))) { + analysis[[name]] <- get(name) + } + + specifications <- super$createModuleSpecifications( + moduleSpecifications = analysis + ) + return(specifications) + } + ), + private = list( + .writeAnalysisSpecs = function(analysisSpecs, resultsFolder) { + message("Writing evidence synthesis analysis specifications") + tempFileName <- tempfile() + evidenceSynthesisAnalysis <- tibble() + for (analysisSettings in analysisSpecs) { + ParallelLogger::saveSettingsToJson(analysisSettings, tempFileName) + analysis <- tibble( + evidenceSynthesisAnalysisId = analysisSettings$evidenceSynthesisAnalysisId, + evidenceSynthesisDescription = analysisSettings$evidenceSynthesisDescription, + sourceMethod = analysisSettings$evidenceSynthesisSource$sourceMethod, + ) |> + mutate(definition = readChar(tempFileName, file.info(tempFileName)$size)) + evidenceSynthesisAnalysis <- bind_rows(evidenceSynthesisAnalysis, analysis) + } + unlink(tempFileName) + fileName <- file.path(resultsFolder, "es_analysis.csv") + CohortGenerator::writeCsv(evidenceSynthesisAnalysis, fileName) + }, + .ensureEmptyAndExists = function(outputTable, resultsFolder) { + diagnostics <- private$.createEmptyResult(outputTable) + fileName <- file.path(resultsFolder, paste0(outputTable, ".csv")) + private$.writeToCsv(data = diagnostics, fileName = 
fileName, append = FALSE) + }, + .executeEvidenceSynthesis = function(connectionDetails, databaseSchema, settings, esDiagnosticThresholds, resultsFolder, minCellCount) { + connection <- DatabaseConnector::connect(connectionDetails) + on.exit(DatabaseConnector::disconnect(connection)) + + outputTables <- c( + "es_cm_result", + "es_cm_diagnostics_summary", + "es_sccs_result", + "es_sccs_diagnostics_summary" + ) + invisible(lapply(outputTables, function(x) { + private$.ensureEmptyAndExists(x, resultsFolder) + })) + + message("Performing evidence synthesis across databases") + invisible(lapply( + X = settings, + FUN = private$.doAnalysis, + connection = connection, + databaseSchema = databaseSchema, + resultsFolder = resultsFolder, + minCellCount = minCellCount, + esDiagnosticThresholds = esDiagnosticThresholds + )) + }, + # analysisSettings = settings[[4]] + .doAnalysis = function(analysisSettings, connection, databaseSchema, resultsFolder, minCellCount, esDiagnosticThresholds) { + perDbEstimates <- private$.getPerDatabaseEstimates( + connection = connection, + databaseSchema = databaseSchema, + evidenceSynthesisSource = analysisSettings$evidenceSynthesisSource + ) + if (nrow(perDbEstimates$estimates) == 0) { + message <- sprintf( + "No unblinded estimates found for source method '%s'", + analysisSettings$evidenceSynthesisSource$sourceMethod + ) + if (!is.null(analysisSettings$evidenceSynthesisSource$databaseIds)) { + message <- sprintf( + "%s restricting to database IDs %s", + message, + paste(analysisSettings$evidenceSynthesisSource$databaseIds, collapse = ", ") + ) + } + if (!is.null(analysisSettings$evidenceSynthesisSource$analysisIds)) { + message <- sprintf( + "%s restricting to analysis IDs %s", + message, + paste(analysisSettings$evidenceSynthesisSource$analysisIds, collapse = ", ") + ) + } + warning(message) + return() + } + + fullKeys <- perDbEstimates$estimates[, c(perDbEstimates$key, "analysisId")] |> + distinct() + + cluster <- 
ParallelLogger::makeCluster(min(10, parallel::detectCores())) + ParallelLogger::clusterRequire(cluster, "dplyr") + on.exit(ParallelLogger::stopCluster(cluster)) + + message(sprintf("Performing analysis %s (%s)", analysisSettings$evidenceSynthesisAnalysisId, analysisSettings$evidenceSynthesisDescription)) + estimates <- ParallelLogger::clusterApply( + cluster = cluster, + x = split(fullKeys, seq_len(nrow(fullKeys))), + fun = private$.doSingleEvidenceSynthesis, + perDbEstimates = perDbEstimates, + analysisSettings = analysisSettings, + minCellCount = minCellCount + ) + estimates <- bind_rows(estimates) + + message("- Calibrating estimates") + estimates <- estimates |> + inner_join(perDbEstimates$trueEffectSizes, by = intersect(names(estimates), names(perDbEstimates$trueEffectSizes))) + if (analysisSettings$controlType == "outcome") { + if (analysisSettings$evidenceSynthesisSource$sourceMethod == "CohortMethod") { + controlKey <- c("targetId", "comparatorId", "analysisId") + } else if (analysisSettings$evidenceSynthesisSource$sourceMethod == "SelfControlledCaseSeries") { + controlKey <- c("covariateId", "analysisId") + } + } else if (analysisSettings$controlType == "exposure") { + if (analysisSettings$evidenceSynthesisSource$sourceMethod == "CohortMethod") { + controlKey <- c("outcomeId", "analysisId") + } else if (analysisSettings$evidenceSynthesisSource$sourceMethod == "SelfControlledCaseSeries") { + controlKey <- c("exposuresOutcomeSetId", "analysisId") + } + } else { + stop(sprintf("Unknown control type '%s'", analysisSettings$controlType)) + } + groupKeys <- estimates[, controlKey] + groupKeys <- apply(groupKeys, 1, paste, collapse = "_") + + estimates <- ParallelLogger::clusterApply( + cluster = cluster, + x = split(estimates, groupKeys), + fun = private$.calibrateEstimates + ) + estimates <- bind_rows(estimates) |> + mutate(evidenceSynthesisAnalysisId = analysisSettings$evidenceSynthesisAnalysisId) + + # Save diagnostics + diagnostics <- estimates[, 
c(perDbEstimates$key, "analysisId", "evidenceSynthesisAnalysisId", "mdrr", "ease", "i2", "tau")] |> + mutate(mdrrDiagnostic = case_when( + is.na(.data$mdrr) ~ "NOT EVALUATED", + .data$mdrr < esDiagnosticThresholds$mdrrThreshold ~ "PASS", + TRUE ~ "FAIL" + )) |> + mutate(easeDiagnostic = case_when( + is.na(.data$ease) ~ "NOT EVALUATED", + abs(.data$ease) < esDiagnosticThresholds$easeThreshold ~ "PASS", + TRUE ~ "FAIL" + )) |> + mutate(i2Diagnostic = case_when( + is.na(.data$i2) ~ "NOT EVALUATED", + abs(.data$i2) < esDiagnosticThresholds$i2Threshold ~ "PASS", + TRUE ~ "FAIL" + )) |> + mutate(tauDiagnostic = case_when( + is.na(.data$tau) ~ "NOT EVALUATED", + abs(.data$tau) < esDiagnosticThresholds$tauThreshold ~ "PASS", + TRUE ~ "FAIL" + )) |> + mutate(unblind = ifelse(.data$mdrrDiagnostic != "FAIL" & + .data$easeDiagnostic != "FAIL" & + .data$i2Diagnostic != "FAIL" & + .data$tauDiagnostic != "FAIL", 1, 0)) + if (analysisSettings$evidenceSynthesisSource$sourceMethod == "CohortMethod") { + fileName <- file.path(resultsFolder, "es_cm_diagnostics_summary.csv") + } else if (analysisSettings$evidenceSynthesisSource$sourceMethod == "SelfControlledCaseSeries") { + fileName <- file.path(resultsFolder, "es_sccs_diagnostics_summary.csv") + } else { + stop(sprintf("Saving diagnostics summary not implemented for source method '%s'", analysisSettings$evidenceSynthesisSource$sourceMethod)) + } + private$.writeToCsv(data = diagnostics, fileName = fileName, append = TRUE) + + # Save estimates + estimates <- estimates |> + select(-"trueEffectSize", -"ease", -"i2", -"tau", -"mdrr") + if (analysisSettings$evidenceSynthesisSource$sourceMethod == "CohortMethod") { + estimates <- estimates |> + select(-"outcomeOfInterest") + fileName <- file.path(resultsFolder, "es_cm_result.csv") + } else if (analysisSettings$evidenceSynthesisSource$sourceMethod == "SelfControlledCaseSeries") { + fileName <- file.path(resultsFolder, "es_sccs_result.csv") + } else { + stop(sprintf("Saving results not 
implemented for source method '%s'", analysisSettings$evidenceSynthesisSource$sourceMethod)) + } + private$.writeToCsv(data = estimates, fileName = fileName, append = TRUE) + }, + # group = split(estimates, groupKeys)[[1]] + .calibrateEstimates = function(group) { + ncs <- group[group$trueEffectSize == 1 & !is.na(group$seLogRr), ] + pcs <- group[!is.na(group$trueEffectSize) & group$trueEffectSize != 1 & !is.na(group$seLogRr), ] + if (nrow(ncs) >= 5) { + null <- EmpiricalCalibration::fitMcmcNull(logRr = ncs$logRr, seLogRr = ncs$seLogRr) + ease <- EmpiricalCalibration::computeExpectedAbsoluteSystematicError(null) + calibratedP <- EmpiricalCalibration::calibrateP( + null = null, + logRr = group$logRr, + seLogRr = group$seLogRr, + twoSided = TRUE + ) + calibratedOneSidedP <- EmpiricalCalibration::calibrateP( + null = null, + logRr = group$logRr, + seLogRr = group$seLogRr, + twoSided = FALSE, + upper = TRUE + ) + if (nrow(pcs) >= 5) { + model <- EmpiricalCalibration::fitSystematicErrorModel( + logRr = c(ncs$logRr, pcs$logRr), + seLogRr = c(ncs$seLogRr, pcs$seLogRr), + trueLogRr = log(c(ncs$trueEffectSize, pcs$trueEffectSize)), + estimateCovarianceMatrix = FALSE + ) + } else { + model <- EmpiricalCalibration::convertNullToErrorModel(null) + } + calibratedCi <- EmpiricalCalibration::calibrateConfidenceInterval(model = model, logRr = group$logRr, seLogRr = group$seLogRr) + group$calibratedRr <- exp(calibratedCi$logRr) + group$calibratedCi95Lb <- exp(calibratedCi$logLb95Rr) + group$calibratedCi95Ub <- exp(calibratedCi$logUb95Rr) + group$calibratedP <- calibratedP$p + group$calibratedOneSidedP <- calibratedOneSidedP$p + group$calibratedLogRr <- calibratedCi$logRr + group$calibratedSeLogRr <- calibratedCi$seLogRr + group$ease <- ease$ease + } else { + group$calibratedRr <- NA + group$calibratedCi95Lb <- NA + group$calibratedCi95Ub <- NA + group$calibratedP <- NA + group$calibratedOneSidedP <- NA + group$calibratedLogRr <- NA + group$calibratedSeLogRr <- NA + group$ease <- NA 
+ } + return(group) + }, + # row <- split(fullKeys, seq_len(nrow(fullKeys)))[[2]] + # row <- tibble(targetId = 8413, comparatorId = 8436, outcomeId = 1078, analysisId = 2) + .doSingleEvidenceSynthesis = function(row, perDbEstimates, analysisSettings, minCellCount) { + sumMinCellCount <- function(counts, minCellCount) { + if (length(counts) == 0) { + return(NA) + } + hasNegative <- any(counts < 0) + sumCount <- sum(abs(counts)) + if (sumCount == 0) { + return(sumCount) + } + if (hasNegative) { + if (sumCount < minCellCount) { + sumCount <- -minCellCount + } else { + sumCount <- -sumCount + } + } else { + if (sumCount < minCellCount) { + sumCount <- -minCellCount + } + } + return(sumCount) + } + computeMdrrFromSe <- function(seLogRr, alpha = 0.05, power = 0.8) { + # Based on the computation of a two-sided p-value, power can be computed as + # power = 1-pnorm(qnorm(1 - alpha/2) - (log(mdrr) / seLogRr))/2 + # That can be translated in into: + mdrr <- exp((qnorm(1 - alpha / 2) - qnorm(2 * (1 - power))) * seLogRr) + return(mdrr) + } + + subset <- perDbEstimates$estimates |> + inner_join(row, by = c(perDbEstimates$key, "analysisId")) + llApproximations <- perDbEstimates$llApproximations |> + inner_join(row, by = c(perDbEstimates$key, "analysisId")) + if (analysisSettings$evidenceSynthesisSource$likelihoodApproximation == "normal") { + llApproximations <- llApproximations |> + filter(!is.na(.data$logRr) & !is.na(.data$seLogRr)) + includedDbs <- llApproximations$databaseId + } else if (analysisSettings$evidenceSynthesisSource$likelihoodApproximation == "adaptive grid") { + includedDbs <- unique(llApproximations$databaseId) + llApproximations <- llApproximations |> + select( + point = .data$logRr, + value = .data$logLikelihood, + .data$databaseId + ) |> + group_by(.data$databaseId) |> + group_split() + } + nDatabases <- length(includedDbs) + subset <- subset |> + filter(.data$databaseId %in% includedDbs) + if (analysisSettings$evidenceSynthesisSource$sourceMethod == 
"CohortMethod") { + counts <- tibble( + targetSubjects = sumMinCellCount(subset$targetSubjects, minCellCount), + comparatorSubjects = sumMinCellCount(subset$comparatorSubjects, minCellCount), + targetDays = sumMinCellCount(subset$targetDays, 0), + comparatorDays = sumMinCellCount(subset$comparatorDays, 0), + targetOutcomes = sumMinCellCount(subset$targetOutcomes, minCellCount), + comparatorOutcomes = sumMinCellCount(subset$comparatorOutcomes, minCellCount), + ) + } else if (analysisSettings$evidenceSynthesisSource$sourceMethod == "SelfControlledCaseSeries") { + counts <- tibble( + outcomeSubjects = sumMinCellCount(subset$outcomeSubjects, minCellCount), + outcomeEvents = sumMinCellCount(subset$outcomeEvents, minCellCount), + outcomeObservationPeriods = sumMinCellCount(subset$outcomeObservationPeriods, 0), + observedDays = sumMinCellCount(subset$observedDays, 0), + covariateSubjects = sumMinCellCount(subset$covariateSubjects, minCellCount), + covariateDays = sumMinCellCount(subset$covariateDays, minCellCount), + covariateEras = sumMinCellCount(subset$covariateEras, minCellCount), + covariateOutcomes = sumMinCellCount(subset$covariateOutcomes, minCellCount) + ) + } else { + stop(sprintf("Aggregating counts not implemented for source method '%s'", analysisSettings$evidenceSynthesisSource$sourceMethod)) + } + + if (nDatabases == 0) { + estimate <- tibble( + rr = as.numeric(NA), + ci95Lb = as.numeric(NA), + ci95Ub = as.numeric(NA), + p = as.numeric(NA), + oneSidedP = as.numeric(NA), + logRr = as.numeric(NA), + seLogRr = as.numeric(NA), + i2 = as.numeric(NA), + tau = as.numeric(NA), + mdrr = as.numeric(Inf) + ) + } else if (nDatabases == 1) { + estimate <- tibble( + rr = exp(subset$logRr), + ci95Lb = subset$ci95Lb, + ci95Ub = subset$ci95Ub, + p = subset$p, + oneSidedP = if ("oneSidedP" %in% colnames(subset)) subset$oneSidedP else NA, + logRr = subset$logRr, + seLogRr = subset$seLogRr, + i2 = NA, + tau = NA, + mdrr = subset$mdrr + ) + } else { + if (is(analysisSettings, 
"FixedEffectsMetaAnalysis")) { + args <- analysisSettings + args$evidenceSynthesisAnalysisId <- NULL + args$evidenceSynthesisDescription <- NULL + args$evidenceSynthesisSource <- NULL + args$controlType <- NULL + args$data <- as.data.frame(llApproximations) + estimate <- do.call(EvidenceSynthesis::computeFixedEffectMetaAnalysis, args) + p <- EmpiricalCalibration::computeTraditionalP( + logRr = estimate$logRr, + seLogRr = estimate$seLogRr, + twoSided = TRUE + ) + oneSidedP <- EmpiricalCalibration::computeTraditionalP( + logRr = estimate$logRr, + seLogRr = estimate$seLogRr, + twoSided = FALSE, + upper = TRUE + ) + estimate <- estimate |> + as_tibble() |> + rename( + ci95Lb = lb, + ci95Ub = ub + ) |> + mutate( + i2 = NA, + tau = NA, + mdrr = computeMdrrFromSe(estimate$seLogRr), + p = !!p, + oneSidedP = !!oneSidedP + ) + } else if (is(analysisSettings, "RandomEffectsMetaAnalysis")) { + m <- meta::metagen( + TE = llApproximations$logRr, + seTE = llApproximations$seLogRr, + studlab = rep("", nrow(llApproximations)), + byvar = NULL, + control = list(maxiter = 1000), + sm = "RR", + level.comb = 1 - analysisSettings$alpha + ) + rfx <- summary(m)$random + oneSidedP <- EmpiricalCalibration::computeTraditionalP( + logRr = rfx$TE, + seLogRr = rfx$seTE, + twoSided = FALSE, + upper = TRUE + ) + estimate <- tibble( + rr = exp(rfx$TE), + ci95Lb = exp(rfx$lower), + ci95Ub = exp(rfx$upper), + p = rfx$p, + oneSidedP = !!oneSidedP, + logRr = rfx$TE, + seLogRr = rfx$seTE, + i2 = m$I2, + tau = NA, + mdrr = computeMdrrFromSe(rfx$seTE) + ) + } else if (is(analysisSettings, "BayesianMetaAnalysis")) { + args <- analysisSettings + args$evidenceSynthesisAnalysisId <- NULL + args$evidenceSynthesisDescription <- NULL + args$evidenceSynthesisSource <- NULL + args$controlType <- NULL + args$data <- llApproximations + estimate <- do.call(EvidenceSynthesis::computeBayesianMetaAnalysis, args) + p <- EmpiricalCalibration::computeTraditionalP( + logRr = estimate$logRr, + seLogRr = estimate$seLogRr, + 
twoSided = TRUE
        )
        oneSidedP <- EmpiricalCalibration::computeTraditionalP(
          logRr = estimate$logRr,
          seLogRr = estimate$seLogRr,
          twoSided = FALSE,
          upper = TRUE
        )
        # Map the Bayesian meta-analysis output (mu scale) onto the common
        # estimate column layout shared by all synthesis methods.
        estimate <- estimate |>
          as_tibble() |>
          transmute(
            rr = exp(.data$mu),
            ci95Lb = exp(.data$mu95Lb),
            ci95Ub = exp(.data$mu95Ub),
            p = !!p,
            oneSidedP = !!oneSidedP,
            logRr = .data$mu,
            seLogRr = .data$muSe,
            tau = .data$tau,
            i2 = NA,
            mdrr = computeMdrrFromSe(estimate$seLogRr)
          )
      }
    }
    estimate <- bind_cols(row, estimate, counts) |>
      mutate(nDatabases = nDatabases)
    return(estimate)
  },
  # Returns TRUE when the given diagnostics table exposes the newer
  # unblind_for_evidence_synthesis column (camelCase after translation),
  # FALSE when only the legacy unblind column is available.
  .hasUnblindForEvidenceSynthesisColumn = function(connection, databaseSchema, table) {
    row <- DatabaseConnector::renderTranslateQuerySql(
      connection = connection,
      sql = "SELECT TOP 1 * FROM @database_schema.@table;",
      database_schema = databaseSchema,
      table = table,
      snakeCaseToCamelCase = TRUE
    )
    # BUG FIX: the column name was misspelled "unlindForEvidenceSynthesis"
    # (missing 'b'), so this function always returned FALSE and callers always
    # fell back to the legacy "unblind" column even when
    # unblind_for_evidence_synthesis was present.
    return("unblindForEvidenceSynthesis" %in% colnames(row))
  },
  # Retrieves per-database estimates, likelihood approximations, and true
  # effect sizes for the requested source method (CohortMethod or SCCS).
  .getPerDatabaseEstimates = function(connection, databaseSchema, evidenceSynthesisSource) {
    if (evidenceSynthesisSource$sourceMethod == "CohortMethod") {
      key <- c("targetId", "comparatorId", "outcomeId")
      databaseIds <- evidenceSynthesisSource$databaseIds
      analysisIds <- evidenceSynthesisSource$analysisIds
      if (private$.hasUnblindForEvidenceSynthesisColumn(connection, databaseSchema, "cm_diagnostics_summary")) {
        unblindColumn <- "unblind_for_evidence_synthesis"
      } else {
        unblindColumn <- "unblind"
      }
      # For backwards compatibility, when CohortMethod did not generate diagnostics
      # for negative controls: if negative control (outcome_of_interest = 0) then
      # still unblind.
sql <- "SELECT cm_result.*,
        mdrr,
        CASE
          WHEN @unblind_column IS NULL THEN 1 - outcome_of_interest
          ELSE @unblind_column
        END AS unblind
      FROM @database_schema.cm_result
      INNER JOIN @database_schema.cm_target_comparator_outcome
        ON cm_result.target_id = cm_target_comparator_outcome.target_id
          AND cm_result.comparator_id = cm_target_comparator_outcome.comparator_id
          AND cm_result.outcome_id = cm_target_comparator_outcome.outcome_id
      LEFT JOIN @database_schema.cm_diagnostics_summary
        ON cm_result.target_id = cm_diagnostics_summary.target_id
          AND cm_result.comparator_id = cm_diagnostics_summary.comparator_id
          AND cm_result.outcome_id = cm_diagnostics_summary.outcome_id
          AND cm_result.analysis_id = cm_diagnostics_summary.analysis_id
          AND cm_result.database_id = cm_diagnostics_summary.database_id
      {@database_ids != ''| @analysis_ids != ''} ? {WHERE}
      {@database_ids != ''} ? { cm_result.database_id IN (@database_ids)}
      {@analysis_ids != ''} ? { {@database_ids != ''} ? {AND} cm_result.analysis_id IN (@analysis_ids)};
      "
      estimates <- DatabaseConnector::renderTranslateQuerySql(
        connection = connection,
        sql = sql,
        database_schema = databaseSchema,
        unblind_column = unblindColumn,
        database_ids = if (is.null(databaseIds)) "" else private$.quoteSql(databaseIds),
        analysis_ids = if (is.null(analysisIds)) "" else analysisIds,
        snakeCaseToCamelCase = TRUE
      ) |>
        as_tibble()

      # Temp hack: detect NA values that have been converted to 0 in the DB.
      # BUG FIX: wrap in which() — a plain logical comparison yields NA for
      # rows where seLogRr is NA, and NA subscripts make the subsequent
      # subscripted assignments error ("NAs are not allowed in subscripted
      # assignments").
      idx <- which(estimates$seLogRr == 0)
      estimates$logRr[idx] <- NA
      estimates$seLogRr[idx] <- NA
      estimates$p[idx] <- NA

      if (evidenceSynthesisSource$likelihoodApproximation == "normal") {
        llApproximations <- estimates |>
          filter(.data$unblind == 1) |>
          select(
            "targetId",
            "comparatorId",
            "outcomeId",
            "analysisId",
            "databaseId",
            "logRr",
            "seLogRr"
          )
      } else if (evidenceSynthesisSource$likelihoodApproximation == "adaptive grid") {
        sql <- "SELECT cm_likelihood_profile.*
          FROM @database_schema.cm_likelihood_profile
          WHERE log_likelihood IS NOT NULL
          {@database_ids != ''} ? { AND cm_likelihood_profile.database_id IN (@database_ids)}
          {@analysis_ids != ''} ? { AND cm_likelihood_profile.analysis_id IN (@analysis_ids)};
          "
        llApproximations <- DatabaseConnector::renderTranslateQuerySql(
          connection = connection,
          sql = sql,
          database_schema = databaseSchema,
          database_ids = if (is.null(databaseIds)) "" else private$.quoteSql(databaseIds),
          analysis_ids = if (is.null(analysisIds)) "" else analysisIds,
          snakeCaseToCamelCase = TRUE
        ) |>
          inner_join(
            # Restrict profiles to estimates that are unblinded for synthesis.
            estimates |>
              filter(.data$unblind == 1) |>
              select(
                "targetId",
                "comparatorId",
                "outcomeId",
                "analysisId",
                "databaseId"
              ),
            by = c("targetId", "comparatorId", "outcomeId", "analysisId", "databaseId")
          )
      } else {
        stop(sprintf("Unknown likelihood approximation '%s'.", evidenceSynthesisSource$likelihoodApproximation))
      }
      sql <- "SELECT *
        FROM @database_schema.cm_target_comparator_outcome;
        "
      trueEffectSizes <- DatabaseConnector::renderTranslateQuerySql(
        connection = connection,
        sql = sql,
        database_schema = databaseSchema,
        snakeCaseToCamelCase = TRUE
      )
      # A stored true effect size of 0 is a placeholder for "unknown" -> NA.
      trueEffectSizes <- trueEffectSizes |>
        mutate(trueEffectSize = ifelse(!is.na(.data$trueEffectSize) & .data$trueEffectSize == 0,
          NA,
          .data$trueEffectSize
        ))
    } else if (evidenceSynthesisSource$sourceMethod == "SelfControlledCaseSeries") {
      key <- c("exposuresOutcomeSetId", "covariateId")
      databaseIds <- evidenceSynthesisSource$databaseIds
      analysisIds <- evidenceSynthesisSource$analysisIds
      if (private$.hasUnblindForEvidenceSynthesisColumn(connection, databaseSchema, "sccs_diagnostics_summary")) {
        unblindColumn <- "unblind_for_evidence_synthesis"
      } else {
        unblindColumn <- "unblind"
      }
      sql <- "SELECT sccs_result.*,
        mdrr,
        CASE
          WHEN @unblind_column IS NULL THEN CASE WHEN true_effect_size IS NULL THEN 0 ELSE 1 END
          ELSE @unblind_column
        END AS unblind
      FROM
@database_schema.sccs_result
      INNER JOIN @database_schema.sccs_covariate
        ON sccs_result.database_id = sccs_covariate.database_id
          AND sccs_result.exposures_outcome_set_id = sccs_covariate.exposures_outcome_set_id
          AND sccs_result.covariate_id = sccs_covariate.covariate_id
          AND sccs_result.analysis_id = sccs_covariate.analysis_id
      INNER JOIN @database_schema.sccs_exposure
        ON sccs_result.exposures_outcome_set_id = sccs_exposure.exposures_outcome_set_id
          AND sccs_exposure.era_id = sccs_covariate.era_id
      LEFT JOIN @database_schema.sccs_diagnostics_summary
        ON sccs_result.exposures_outcome_set_id = sccs_diagnostics_summary.exposures_outcome_set_id
          AND sccs_result.covariate_id = sccs_diagnostics_summary.covariate_id
          AND sccs_result.analysis_id = sccs_diagnostics_summary.analysis_id
          AND sccs_result.database_id = sccs_diagnostics_summary.database_id
      {@database_ids != ''| @analysis_ids != ''} ? {WHERE}
      {@database_ids != ''} ? { sccs_result.database_id IN (@database_ids)}
      {@analysis_ids != ''} ? { {@database_ids != ''} ?
{AND} sccs_result.analysis_id IN (@analysis_ids)};
      "
      estimates <- DatabaseConnector::renderTranslateQuerySql(
        connection = connection,
        sql = sql,
        database_schema = databaseSchema,
        unblind_column = unblindColumn,
        database_ids = if (is.null(databaseIds)) "" else private$.quoteSql(databaseIds),
        analysis_ids = if (is.null(analysisIds)) "" else analysisIds,
        snakeCaseToCamelCase = TRUE
      ) |>
        as_tibble()

      # Temp hack: detect NA values that have been converted to 0 in the DB.
      # BUG FIX: wrap in which() so NA values of seLogRr cannot produce NA
      # subscripts in the assignments below.
      idx <- which(estimates$seLogRr == 0)
      estimates$logRr[idx] <- NA
      estimates$seLogRr[idx] <- NA
      estimates$p[idx] <- NA

      if (evidenceSynthesisSource$likelihoodApproximation == "normal") {
        llApproximations <- estimates |>
          filter(.data$unblind == 1) |>
          select(
            "exposuresOutcomeSetId",
            "covariateId",
            "analysisId",
            "databaseId",
            "logRr",
            "seLogRr"
          )
      } else if (evidenceSynthesisSource$likelihoodApproximation == "adaptive grid") {
        sql <- "SELECT sccs_likelihood_profile.*
          FROM @database_schema.sccs_likelihood_profile
          WHERE log_likelihood IS NOT NULL
          {@database_ids != ''} ? { AND sccs_likelihood_profile.database_id IN (@database_ids)}
          {@analysis_ids != ''} ? { AND sccs_likelihood_profile.analysis_id IN (@analysis_ids)};
          "
        llApproximations <- DatabaseConnector::renderTranslateQuerySql(
          connection = connection,
          sql = sql,
          database_schema = databaseSchema,
          database_ids = if (is.null(databaseIds)) "" else private$.quoteSql(databaseIds),
          analysis_ids = if (is.null(analysisIds)) "" else analysisIds,
          snakeCaseToCamelCase = TRUE
        ) |>
          inner_join(
            # Restrict profiles to estimates that are unblinded for synthesis.
            estimates |>
              filter(.data$unblind == 1) |>
              select(
                "exposuresOutcomeSetId",
                "covariateId",
                "analysisId",
                "databaseId"
              ),
            by = c("exposuresOutcomeSetId", "covariateId", "analysisId", "databaseId")
          )
      } else {
        stop(sprintf("Unknown likelihood approximation '%s'.", evidenceSynthesisSource$likelihoodApproximation))
      }
      sql <- "SELECT DISTINCT sccs_covariate.analysis_id,
        sccs_covariate.exposures_outcome_set_id,
        sccs_covariate.covariate_id,
        true_effect_size
      FROM @database_schema.sccs_exposure
      INNER JOIN @database_schema.sccs_covariate
        ON sccs_exposure.era_id = sccs_covariate.era_id
          AND sccs_exposure.exposures_outcome_set_id = sccs_covariate.exposures_outcome_set_id
      INNER JOIN @database_schema.sccs_covariate_analysis
        ON sccs_covariate.analysis_id = sccs_covariate_analysis.analysis_id
          AND sccs_covariate.covariate_analysis_id = sccs_covariate_analysis.covariate_analysis_id
      WHERE sccs_covariate_analysis.variable_of_interest = 1;
      "
      trueEffectSizes <- DatabaseConnector::renderTranslateQuerySql(
        connection = connection,
        sql = sql,
        database_schema = databaseSchema,
        snakeCaseToCamelCase = TRUE
      )
      # A stored true effect size of 0 is a placeholder for "unknown" -> NA.
      trueEffectSizes <- trueEffectSizes |>
        mutate(trueEffectSize = ifelse(!is.na(.data$trueEffectSize) & .data$trueEffectSize == 0,
          NA,
          .data$trueEffectSize
        ))
    } else {
      stop(sprintf("Evidence synthesis for source method '%s' hasn't been implemented yet.", evidenceSynthesisSource$sourceMethod))
    }
    return(list(
      key = key,
      estimates = estimates,
      llApproximations = llApproximations,
      trueEffectSizes =
trueEffectSizes
    ))
  },
  # Writes a result table to CSV, keeping only the columns declared in the
  # results data model and converting names to snake_case.
  .writeToCsv = function(data, fileName, append) {
    tableName <- gsub(".csv$", "", basename(fileName))
    # Renamed from `names` to avoid shadowing base::names().
    columnNames <- colnames(private$.createEmptyResult(tableName))
    data <- data[, columnNames]
    data <- SqlRender::camelCaseToSnakeCaseNames(data)
    readr::write_csv(data, fileName, append = append)
  },
  # Returns a zero-row tibble whose columns match the results data model
  # specification for the given table (camelCase names).
  .createEmptyResult = function(tableName = "") {
    columns <- private$.getResultsDataModelSpecification() |>
      filter(.data$tableName == !!tableName) |>
      pull(.data$columnName) |>
      SqlRender::snakeCaseToCamelCase()
    result <- vector(length = length(columns))
    names(result) <- columns
    # BUG FIX: the tibble argument is `.name_repair`; the misspelled
    # `name_repair` was silently swallowed by `...` and ignored.
    result <- as_tibble(t(result), .name_repair = "check_unique")
    result <- result[FALSE, ]
    return(result)
  },
  # Quotes a character vector for use in a SQL IN (...) clause.
  .quoteSql = function(values) {
    return(paste0("'", paste(values, collapse = "', '"), "'"))
  },
  # Reads the evidence-synthesis results data model specification CSV.
  .getResultsDataModelSpecification = function() {
    rdms <- CohortGenerator::readCsv(
      file = private$.getResultsDataModelSpecificationFileLocation()
    )
    return(rdms)
  },
  # Location of the specification CSV shipped with the Strategus package.
  .getResultsDataModelSpecificationFileLocation = function() {
    return(system.file(
      file.path("csv", "evidenceSynthesisRdms.csv"),
      package = "Strategus"
    ))
  }
  )
)
diff --git a/R/Module-PatientLevelPrediction.R b/R/Module-PatientLevelPrediction.R
new file mode 100644
index 00000000..39612384
--- /dev/null
+++ b/R/Module-PatientLevelPrediction.R
@@ -0,0 +1,171 @@
# PatientLevelPredictionModule -------------
#' @title Module for performing patient-level prediction studies
#' @export
#' @description
#' Module for performing patient-level prediction in an observational
#' database in the OMOP Common Data Model.
PatientLevelPredictionModule <- R6::R6Class(
  classname = "PatientLevelPredictionModule",
  inherit = StrategusModule,
  public = list(
    #' @field tablePrefix The table prefix to append to the results tables
    tablePrefix = "plp_",
    #' @description Initialize the module
    initialize = function() {
      super$initialize()
    },
    #' @description Executes the PatientLevelPrediction package
    #' @template connectionDetails
    #' @template analysisSpecifications
    #' @template executionSettings
    execute = function(connectionDetails, analysisSpecifications, executionSettings) {
      # Base class validates inputs and populates private$jobContext.
      super$execute(connectionDetails, analysisSpecifications, executionSettings)
      checkmate::assertClass(executionSettings, "CdmExecutionSettings")

      jobContext <- private$jobContext
      cohortDefinitionSet <- super$.createCohortDefinitionSetFromJobContext()
      workFolder <- jobContext$moduleExecutionSettings$workSubFolder
      resultsFolder <- jobContext$moduleExecutionSettings$resultsSubFolder

      # Creating database details
      databaseDetails <- PatientLevelPrediction::createDatabaseDetails(
        connectionDetails = connectionDetails,
        cdmDatabaseSchema = jobContext$moduleExecutionSettings$cdmDatabaseSchema,
        cohortDatabaseSchema = jobContext$moduleExecutionSettings$workDatabaseSchema,
        # NOTE(review): cdmDatabaseName is set to the connection details
        # reference, not a human-readable database name — confirm intended.
        cdmDatabaseName = jobContext$moduleExecutionSettings$connectionDetailsReference,
        cdmDatabaseId = jobContext$moduleExecutionSettings$databaseId,
        # tempEmulationSchema = , is there a temp schema specified anywhere? TODO confirm
        cohortTable = jobContext$moduleExecutionSettings$cohortTableNames$cohortTable,
        outcomeDatabaseSchema = jobContext$moduleExecutionSettings$workDatabaseSchema,
        outcomeTable = jobContext$moduleExecutionSettings$cohortTableNames$cohortTable
      )

      # Point every covariate-settings object at the study cohort table.
      jobContext$settings <- private$.setCovariateSchemaTable(
        modelDesignList = jobContext$settings$modelDesignList,
        cohortDatabaseSchema = jobContext$moduleExecutionSettings$workDatabaseSchema,
        cohortTable = jobContext$moduleExecutionSettings$cohortTableNames$cohortTable
      )

      # run the models
      PatientLevelPrediction::runMultiplePlp(
        databaseDetails = databaseDetails,
        modelDesignList = jobContext$settings,
        cohortDefinitions = cohortDefinitionSet,
        saveDirectory = workFolder
      )

      private$.message("Export data to csv files")

      # runMultiplePlp writes its results into a SQLite file under workFolder;
      # extract those tables to CSV in the results sub-folder.
      sqliteConnectionDetails <- DatabaseConnector::createConnectionDetails(
        dbms = "sqlite",
        server = file.path(workFolder, "sqlite", "databaseFile.sqlite")
      )

      PatientLevelPrediction::extractDatabaseToCsv(
        connectionDetails = sqliteConnectionDetails,
        databaseSchemaSettings = PatientLevelPrediction::createDatabaseSchemaSettings(
          resultSchema = "main", # sqlite settings
          tablePrefix = "", # sqlite settings
          targetDialect = "sqlite",
          tempEmulationSchema = NULL
        ),
        csvFolder = file.path(resultsFolder),
        fileAppend = NULL
      )

      private$.message(paste("Results available at:", resultsFolder))
    },
    #' @description Create the results data model for the module
    #' @template resultsConnectionDetails
    #' @template resultsDatabaseSchema
    #' @template tablePrefix
    createResultsDataModel = function(resultsConnectionDetails, resultsDatabaseSchema, tablePrefix = self$tablePrefix) {
      super$createResultsDataModel(resultsConnectionDetails, resultsDatabaseSchema, tablePrefix)
      PatientLevelPrediction::createPlpResultTables(
        connectionDetails = resultsConnectionDetails,
        targetDialect = resultsConnectionDetails$dbms,
        resultSchema = resultsDatabaseSchema,
        deleteTables =
FALSE, # was `F`: use the non-reassignable literal
        createTables = TRUE, # was `T`
        tablePrefix = tablePrefix
      )
    },
    #' @description Upload the results for the module
    #' @template resultsConnectionDetails
    #' @template analysisSpecifications
    #' @template resultsDataModelSettings
    uploadResults = function(resultsConnectionDetails, analysisSpecifications, resultsDataModelSettings) {
      super$uploadResults(resultsConnectionDetails, analysisSpecifications, resultsDataModelSettings)

      databaseSchemaSettings <- PatientLevelPrediction::createDatabaseSchemaSettings(
        resultSchema = resultsDataModelSettings$resultsDatabaseSchema,
        tablePrefix = self$tablePrefix,
        targetDialect = resultsConnectionDetails$dbms
      )

      resultsFolder <- private$jobContext$moduleExecutionSettings$resultsSubFolder
      # TODO: This function does not expose
      # a way to specify the database identifier file
      # which makes the purge problematic since I'm
      # not sure how it will know what to purge...
      PatientLevelPrediction::insertCsvToDatabase(
        csvFolder = resultsFolder,
        connectionDetails = resultsConnectionDetails,
        databaseSchemaSettings = databaseSchemaSettings,
        modelSaveLocation = file.path(resultsFolder, "dbmodels"),
        csvTableAppend = ""
      )
    },
    #' @description Creates the PatientLevelPrediction Module Specifications
    #' @param modelDesignList A list of model designs created by
    #' PatientLevelPrediction
    createModuleSpecifications = function(modelDesignList) {
      # Capture all formal arguments by name into a settings list.
      analysis <- list()
      for (name in names(formals(self$createModuleSpecifications))) {
        analysis[[name]] <- get(name)
      }

      specifications <- super$createModuleSpecifications(
        moduleSpecifications = analysis
      )
      return(specifications)
    },
    #' @description Validate the module specifications
    #' @param moduleSpecifications The PatientLevelPrediction module specifications
    validateModuleSpecifications = function(moduleSpecifications) {
      super$validateModuleSpecifications(
        moduleSpecifications = moduleSpecifications
      )
    }
  ),
  private = list(
    # Rewrites cohortDatabaseSchema/cohortTable inside every covariate-settings
    # object of every model design so cohort-based covariates read from the
    # study's cohort table.
    .setCovariateSchemaTable = function(
      modelDesignList,
cohortDatabaseSchema,
      cohortTable) {
      # Normalize a single design / single settings object to a list.
      if (inherits(modelDesignList, "modelDesign")) {
        modelDesignList <- list(modelDesignList)
      }

      # seq_along() (was 1:length(...)) is safe for empty lists.
      for (i in seq_along(modelDesignList)) {
        covariateSettings <- modelDesignList[[i]]$covariateSettings

        if (inherits(covariateSettings, "covariateSettings")) {
          covariateSettings <- list(covariateSettings)
        }

        for (j in seq_along(covariateSettings)) {
          if ("cohortDatabaseSchema" %in% names(covariateSettings[[j]])) {
            covariateSettings[[j]]$cohortDatabaseSchema <- cohortDatabaseSchema
          }
          if ("cohortTable" %in% names(covariateSettings[[j]])) {
            covariateSettings[[j]]$cohortTable <- cohortTable
          }
        }

        modelDesignList[[i]]$covariateSettings <- covariateSettings
      }

      return(modelDesignList)
    }
  )
)
diff --git a/R/Module-SelfControlledCaseSeries.R b/R/Module-SelfControlledCaseSeries.R
new file mode 100644
index 00000000..a6bc3c9a
--- /dev/null
+++ b/R/Module-SelfControlledCaseSeries.R
@@ -0,0 +1,147 @@
# SelfControlledCaseSeriesModule -------------
#' @title Module for performing Self-Controlled Case Series (SCCS) analyses
#' in an observational database in the OMOP Common Data Model.
#' @export
#' @description
#' Module for performing Self-Controlled Case Series (SCCS) analyses
#' in an observational database in the OMOP Common Data Model.
SelfControlledCaseSeriesModule <- R6::R6Class(
  classname = "SelfControlledCaseSeriesModule",
  inherit = StrategusModule,
  public = list(
    #' @field tablePrefix The table prefix for results tables
    tablePrefix = "sccs_",
    #' @description Initialize the module
    initialize = function() {
      super$initialize()
    },
    #' @description Executes the SelfControlledCaseSeries package
    #' @template connectionDetails
    #' @template analysisSpecifications
    #' @template executionSettings
    execute = function(connectionDetails, analysisSpecifications, executionSettings) {
      # Base class validates inputs and populates private$jobContext.
      super$execute(connectionDetails, analysisSpecifications, executionSettings)
      checkmate::assertClass(executionSettings, "CdmExecutionSettings")

      jobContext <- private$jobContext
      # NOTE(review): detectCores() claims every core of the host, which can
      # oversubscribe shared machines — consider making this configurable.
      sccsMultiThreadingSettings <- SelfControlledCaseSeries::createDefaultSccsMultiThreadingSettings(parallel::detectCores())

      # All exposure/outcome/nesting/custom-covariate cohorts live in the same
      # Strategus-managed cohort table.
      args <- jobContext$settings
      args$connectionDetails <- connectionDetails
      args$cdmDatabaseSchema <- jobContext$moduleExecutionSettings$cdmDatabaseSchema
      args$exposureDatabaseSchema <- jobContext$moduleExecutionSettings$workDatabaseSchema
      args$exposureTable <- jobContext$moduleExecutionSettings$cohortTableNames$cohortTable
      args$outcomeDatabaseSchema <- jobContext$moduleExecutionSettings$workDatabaseSchema
      args$outcomeTable <- jobContext$moduleExecutionSettings$cohortTableNames$cohortTable
      args$nestingCohortDatabaseSchema <- jobContext$moduleExecutionSettings$workDatabaseSchema
      args$nestingCohortTable <- jobContext$moduleExecutionSettings$cohortTableNames$cohortTable
      args$customCovariateDatabaseSchema <- jobContext$moduleExecutionSettings$workDatabaseSchema
      args$customCovariateTable <- jobContext$moduleExecutionSettings$cohortTableNames$cohortTable
      args$outputFolder <- jobContext$moduleExecutionSettings$workSubFolder
      args$sccsMultiThreadingSettings <- sccsMultiThreadingSettings
      # Diagnostics thresholds are applied at export time, not at run time.
      args$sccsDiagnosticThresholds <- NULL
      do.call(SelfControlledCaseSeries::runSccsAnalyses, args)

exportFolder <- jobContext$moduleExecutionSettings$resultsSubFolder
      # Export run results to CSV, applying min-cell-count censoring and the
      # diagnostics thresholds from the module settings.
      SelfControlledCaseSeries::exportToCsv(
        outputFolder = jobContext$moduleExecutionSettings$workSubFolder,
        exportFolder = exportFolder,
        databaseId = jobContext$moduleExecutionSettings$databaseId,
        minCellCount = jobContext$moduleExecutionSettings$minCellCount,
        sccsDiagnosticThresholds = jobContext$settings$sccsDiagnosticThresholds
      )
      # TODO: Removing this to make the upload easier
      # unlink(file.path(exportFolder, sprintf("Results_%s.zip", jobContext$moduleExecutionSettings$databaseId)))

      # Restrict the shipped specification to tables that were actually
      # exported, and verify the naming convention.
      resultsDataModel <- CohortGenerator::readCsv(file = system.file("csv", "resultsDataModelSpecification.csv", package = "SelfControlledCaseSeries"))
      resultsDataModel <- resultsDataModel[file.exists(file.path(exportFolder, paste0(resultsDataModel$tableName, ".csv"))), ]
      if (any(!startsWith(resultsDataModel$tableName, self$tablePrefix))) {
        stop("Table names do not have required prefix")
      }
      CohortGenerator::writeCsv(
        x = resultsDataModel,
        file = file.path(exportFolder, "resultsDataModelSpecification.csv"),
        warnOnFileNameCaseMismatch = FALSE
      )

      private$.message(paste("Results available at:", exportFolder))
    },
    #' @description Create the results data model for the module
    #' @template resultsConnectionDetails
    #' @template resultsDatabaseSchema
    #' @template tablePrefix
    createResultsDataModel = function(resultsConnectionDetails, resultsDatabaseSchema, tablePrefix = "") {
      super$createResultsDataModel(resultsConnectionDetails, resultsDatabaseSchema, tablePrefix)
      # Note: not passing the tablePrefix argument to
      # createResultsDataModel since the SCCS results
      # model already contains the "sccs_" table prefix
      # (removed the stray empty trailing argument from this call)
      SelfControlledCaseSeries::createResultsDataModel(
        connectionDetails = resultsConnectionDetails,
        databaseSchema = resultsDatabaseSchema
      )
    },
    #' @description Upload the results for the module
    #' @template resultsConnectionDetails
    #' @template
analysisSpecifications
    #' @template resultsDataModelSettings
    uploadResults = function(resultsConnectionDetails, analysisSpecifications, resultsDataModelSettings) {
      super$uploadResults(resultsConnectionDetails, analysisSpecifications, resultsDataModelSettings)

      # TODO: This is something SCCS does differently.
      # Find the results zip file in the results sub folder
      resultsFolder <- private$jobContext$moduleExecutionSettings$resultsSubFolder
      zipFiles <- list.files(
        path = resultsFolder,
        pattern = "\\.zip$",
        full.names = TRUE
      )

      if (length(zipFiles) > 0) {
        zipFileName <- zipFiles[1]
      } else {
        # Create a zip file from the results in the directory.
        # BUG FIX: pass the full path for zipFile — a bare "results.zip" is
        # created relative to the current working directory, but the upload
        # below reads file.path(resultsFolder, "results.zip").
        zipFileName <- file.path(resultsFolder, "results.zip")
        DatabaseConnector::createZipFile(
          zipFile = zipFileName,
          files = list.files(resultsFolder, pattern = ".*\\.csv$"),
          rootFolder = resultsFolder
        )
      }

      SelfControlledCaseSeries::uploadResults(
        connectionDetails = resultsConnectionDetails,
        schema = resultsDataModelSettings$resultsDatabaseSchema,
        zipFileName = zipFileName,
        purgeSiteDataBeforeUploading = FALSE
      )
    },
    #' @description Creates the SelfControlledCaseSeries Module Specifications
    #' @param sccsAnalysisList A list of SCCS analysis settings
    #' @param exposuresOutcomeList A list of exposures-outcome sets
    #' @param analysesToExclude Analyses to exclude from execution
    #' @param combineDataFetchAcrossOutcomes Combine data fetch across outcomes?
    #' @param sccsDiagnosticThresholds Diagnostics thresholds applied at export
    createModuleSpecifications = function(sccsAnalysisList,
                                          exposuresOutcomeList,
                                          analysesToExclude = NULL,
                                          combineDataFetchAcrossOutcomes = FALSE,
                                          sccsDiagnosticThresholds = SelfControlledCaseSeries::createSccsDiagnosticThresholds()) {
      # Capture all formal arguments by name into a settings list.
      analysis <- list()
      for (name in names(formals(self$createModuleSpecifications))) {
        analysis[[name]] <- get(name)
      }

      specifications <- super$createModuleSpecifications(
        moduleSpecifications = analysis
      )
      return(specifications)
    },
    #' @description Validate the module specifications
    #' @param
moduleSpecifications The SelfControlledCaseSeries module specifications
    validateModuleSpecifications = function(moduleSpecifications) {
      super$validateModuleSpecifications(
        moduleSpecifications = moduleSpecifications
      )
    }
  )
)
diff --git a/R/Module-StrategusModule.R b/R/Module-StrategusModule.R
new file mode 100644
index 00000000..a9d783cd
--- /dev/null
+++ b/R/Module-StrategusModule.R
@@ -0,0 +1,285 @@
# Job Context -------------
#' @title Job context holds the elements of the analysis specification
#' and execution settings necessary to execute a module.
#' @description
#' This is an internal class used by the StrategusModule (and child classes)
#' execute function
JobContext <- R6::R6Class(
  classname = "JobContext",
  public = list(
    #' @field sharedResources Shared resources for execution
    #' TODO: Revisit to break this into fields for cohorts, subsets,
    #' negative controls, etc.
    sharedResources = list(),
    #' @field settings Module settings
    settings = list(),
    #' @field moduleExecutionSettings Module execution settings
    moduleExecutionSettings = list()
  )
)

# StrategusModule -------------
#' @title StrategusModule defines the base class for each HADES Strategus module
#' @export
#' @description
#' Provides a base class for HADES Strategus modules to inherit
StrategusModule <- R6::R6Class(
  classname = "StrategusModule",
  public = list(
    #' @field moduleName The name of the module taken from the class name.
    #' This is set in the constructor of the class.
    moduleName = "",
    #' @field moduleClassName The class name that identifies
    #' the module specifications in the overall analysis specification.
    #' This is set in the constructor of the class.
    moduleClassName = "",
    #' @field internalModuleSpecificationClassName A constant value.
    #' The base class name that identifies a module specification
    #' in the analysis specification.
internalModuleSpecificationClassName = "ModuleSpecifications",
    #' @field internalSharedResourcesClassName A constant value. The class name
    #' that identifies the shared resources section in the overall analysis
    #' specification.
    internalSharedResourcesClassName = "SharedResources",
    #' @description Initialize the module
    initialize = function() {
      # Use `<-` for assignment (was `=`); derive names from the R6 subclass.
      self$moduleName <- class(self)[[1]]
      self$moduleClassName <- paste0(self$moduleName, "Specifications")
    },
    #' @description Executes the module
    #' @template connectionDetails
    #' @template analysisSpecifications
    #' @template executionSettings
    execute = function(connectionDetails, analysisSpecifications, executionSettings) {
      errorMessages <- checkmate::makeAssertCollection()
      checkmate::assertClass(connectionDetails, "ConnectionDetails", add = errorMessages)
      checkmate::assertClass(analysisSpecifications, "AnalysisSpecifications", add = errorMessages)
      checkmate::assertClass(executionSettings, "ExecutionSettings", add = errorMessages)
      checkmate::reportAssertions(collection = errorMessages)

      # Setup the job context
      private$.createJobContext(analysisSpecifications, executionSettings)
      private$.message("EXECUTING: ", self$moduleName)
    },
    #' @description Create the results data model for the module
    #' @template resultsConnectionDetails
    #' @template resultsDatabaseSchema
    #' @template tablePrefix
    createResultsDataModel = function(resultsConnectionDetails, resultsDatabaseSchema, tablePrefix = "") {
      errorMessages <- checkmate::makeAssertCollection()
      checkmate::assertClass(resultsConnectionDetails, "ConnectionDetails", add = errorMessages)
      checkmate::assertCharacter(resultsDatabaseSchema, len = 1, add = errorMessages)
      checkmate::reportAssertions(collection = errorMessages)
      private$.message("CREATE RESULTS DATA MODEL: ", self$moduleName)
    },
    #' @description Upload the results for the module
    #' @template resultsConnectionDetails
    #' @template analysisSpecifications
    #' @template resultsDataModelSettings
    uploadResults = function(resultsConnectionDetails, analysisSpecifications, resultsDataModelSettings) {
      errorMessages <- checkmate::makeAssertCollection()
      checkmate::assertClass(resultsConnectionDetails, "ConnectionDetails", add = errorMessages)
      checkmate::assertClass(resultsDataModelSettings, "ResultsDataModelSettings", add = errorMessages)
      checkmate::reportAssertions(collection = errorMessages)

      # Setup the job context
      private$.createJobContext(analysisSpecifications, resultsDataModelSettings)
      private$.message("UPLOAD RESULTS: ", self$moduleName)
    },
    #' @description Base function for creating the module settings object.
    #' Each module will have its own implementation and this base class method
    #' will be used to ensure the class of the specifications is set properly.
    #' @template moduleSpecifications
    createModuleSpecifications = function(moduleSpecifications) {
      # Wrap the raw settings with the module name; `<-` was `=`.
      moduleSpecifications <- list(
        module = self$moduleName,
        settings = moduleSpecifications
      )
      class(moduleSpecifications) <- c(self$internalModuleSpecificationClassName, self$moduleClassName)
      return(moduleSpecifications)
    },
    #' @description Base function for creating the shared resources settings object.
    #' Each module will have its own implementation if it needs to create
    #' a shared resource.
    #' @param className The class name of the shared resources specifications
    #' @param sharedResourcesSpecifications The shared resources specifications
    createSharedResourcesSpecifications = function(className, sharedResourcesSpecifications) {
      class(sharedResourcesSpecifications) <- c(className, self$internalSharedResourcesClassName)
      return(sharedResourcesSpecifications)
    },
    #' @description Base function for validating the module settings object.
+ #' Each module will have its own implementation and this base class method + #' will be used to ensure the module specifications are valid ahead of + #' execution + #' @template moduleSpecifications + validateModuleSpecifications = function(moduleSpecifications) { + errorMessages <- checkmate::makeAssertCollection() + checkmate::assertClass(moduleSpecifications, self$internalModuleSpecificationClassName) + checkmate::assertClass(moduleSpecifications, self$moduleClassName) + checkmate::reportAssertions(collection = errorMessages) + }, + #' @description Base function for validating the shared resources + #' specification settings object. Each module will have its own + #' implementation and this base class method will be used to ensure + #' the module specifications are valid ahead of execution + #' @param className The class name of the shared resources specifications + #' @param sharedResourcesSpecifications The shared resources specifications + validateSharedResourcesSpecifications = function(className, sharedResourcesSpecifications) { + errorMessages <- checkmate::makeAssertCollection() + checkmate::assertClass(sharedResourcesSpecifications, self$internalSharedResourcesClassName) + checkmate::assertClass(sharedResourcesSpecifications, className) + checkmate::reportAssertions(collection = errorMessages) + } + ), + private = list( + jobContext = JobContext$new(), + .message = function(...) { + rlang::inform(paste0(...)) + }, + .createJobContext = function(analysisSpecifications, executionSettings) { + # Make sure this is created each call + private$jobContext <- JobContext$new() + # Get the moduleSpecification from the analysis specification + # for the current class name. 
+ moduleSpecification <- private$.getModuleSpecification( + analysisSpecifications = analysisSpecifications, + moduleName = self$moduleName + ) + if (is.null(moduleSpecification)) { + stop(paste0(self$moduleName, " settings could not be found in the analysis specification.")) + } + private$jobContext$settings <- moduleSpecification$settings + + # Assemble the job context from the analysis specification + # for the given module. + private$jobContext$sharedResources <- analysisSpecifications$sharedResources + private$jobContext$moduleExecutionSettings <- executionSettings + private$jobContext$moduleExecutionSettings$resultsSubFolder <- file.path(private$jobContext$moduleExecutionSettings$resultsFolder, self$moduleName) + + if (is(private$jobContext$moduleExecutionSettings, "ExecutionSettings")) { + private$jobContext$moduleExecutionSettings$workSubFolder <- file.path(private$jobContext$moduleExecutionSettings$workFolder, self$moduleName) + } + + # TODO: This should be in the execution settings already for + # CDM ExecutionSettings + #private$jobContext$moduleExecutionSettings$databaseId <- databaseId + }, + .getModuleSpecification = function(analysisSpecifications, moduleName) { + moduleSpecification <- NULL + for (i in 1:length(analysisSpecifications$moduleSpecifications)) { + curModuleName <- analysisSpecifications$moduleSpecifications[[i]]$module + if (tolower(curModuleName) == tolower(moduleName)) { + moduleSpecification <- analysisSpecifications$moduleSpecifications[[i]] + } + } + return(moduleSpecification) + }, + .getSharedResourceByClassName = function(sharedResources, className) { + returnVal <- NULL + for (i in 1:length(sharedResources)) { + if (className %in% class(sharedResources[[i]])) { + returnVal <- sharedResources[[i]] + break + } + } + invisible(returnVal) + }, + .createCohortDefinitionSetFromJobContext = function(generateStats) { + jobContext <- private$jobContext + cohortDefinitions <- list() + if (length(jobContext$sharedResources) <= 0) { + 
stop("No shared resources found") + } + cohortDefinitionSharedResource <- private$.getSharedResourceByClassName( + sharedResources = jobContext$sharedResources, + class = "CohortDefinitionSharedResources" + ) + if (is.null(cohortDefinitionSharedResource)) { + stop("Cohort definition shared resource not found!") + } + if ((is.null(cohortDefinitionSharedResource$subsetDefs) && !is.null(cohortDefinitionSharedResource$cohortSubsets)) || + (!is.null(cohortDefinitionSharedResource$subsetDefs) && is.null(cohortDefinitionSharedResource$cohortSubsets))) { + stop("Cohort subset functionality requires specifying cohort subset definition & cohort subset identifiers.") + } + cohortDefinitionSet <- private$.getCohortDefinitionSetFromSharedResource( + cohortDefinitionSharedResource = cohortDefinitionSharedResource, + generateStats = generateStats + ) + return(cohortDefinitionSet) + }, + .getCohortDefinitionSetFromSharedResource = function(cohortDefinitionSharedResource, generateStats) { + cohortDefinitions <- cohortDefinitionSharedResource$cohortDefinitions + if (length(cohortDefinitions) <= 0) { + stop("No cohort definitions found") + } + cohortDefinitionSet <- CohortGenerator::createEmptyCohortDefinitionSet() + for (i in 1:length(cohortDefinitions)) { + cohortJson <- cohortDefinitions[[i]]$cohortDefinition + cohortExpression <- CirceR::cohortExpressionFromJson(cohortJson) + cohortSql <- CirceR::buildCohortQuery(cohortExpression, options = CirceR::createGenerateOptions(generateStats = generateStats)) + cohortDefinitionSet <- rbind(cohortDefinitionSet, data.frame( + cohortId = as.double(cohortDefinitions[[i]]$cohortId), + cohortName = cohortDefinitions[[i]]$cohortName, + sql = cohortSql, + json = cohortJson, + stringsAsFactors = FALSE + )) + } + + if (length(cohortDefinitionSharedResource$subsetDefs)) { + subsetDefinitions <- lapply(cohortDefinitionSharedResource$subsetDefs, CohortGenerator::CohortSubsetDefinition$new) + for (subsetDef in subsetDefinitions) { + ind <- 
which(sapply(cohortDefinitionSharedResource$cohortSubsets, function(y) subsetDef$definitionId %in% y$subsetId)) + targetCohortIds <- unlist(lapply(cohortDefinitionSharedResource$cohortSubsets[ind], function(y) y$targetCohortId)) + cohortDefinitionSet <- CohortGenerator::addCohortSubsetDefinition( + cohortDefinitionSet = cohortDefinitionSet, + cohortSubsetDefintion = subsetDef, + targetCohortIds = targetCohortIds + ) + } + } + + return(cohortDefinitionSet) + }, + .jobContextHasNegativeControlOutcomeSharedResource = function() { + jobContext <- private$jobContext + ncSharedResource <- private$.getSharedResourceByClassName( + sharedResources = jobContext$sharedResources, + className = "NegativeControlOutcomeSharedResources" + ) + hasNegativeControlOutcomeSharedResource <- !is.null(ncSharedResource) + invisible(hasNegativeControlOutcomeSharedResource) + }, + .createNegativeControlOutcomeSettingsFromJobContext = function() { + jobContext <- private$jobContext + negativeControlSharedResource <- private$.getSharedResourceByClassName( + sharedResources = jobContext$sharedResources, + className = "NegativeControlOutcomeSharedResources" + ) + if (is.null(negativeControlSharedResource)) { + stop("Negative control outcome shared resource not found!") + } + negativeControlOutcomes <- negativeControlSharedResource$negativeControlOutcomes$negativeControlOutcomeCohortSet + if (length(negativeControlOutcomes) <= 0) { + stop("No negative control outcomes found") + } + negativeControlOutcomeCohortSet <- CohortGenerator::createEmptyNegativeControlOutcomeCohortSet() + for (i in 1:length(negativeControlOutcomes)) { + nc <- negativeControlOutcomes[[i]] + negativeControlOutcomeCohortSet <- rbind( + negativeControlOutcomeCohortSet, + data.frame( + cohortId = as.numeric(nc$cohortId), + cohortName = nc$cohortName, + outcomeConceptId = as.numeric(nc$outcomeConceptId) + ) + ) + } + invisible(list( + cohortSet = negativeControlOutcomeCohortSet, + occurrenceType = 
negativeControlSharedResource$negativeControlOutcomes$occurrenceType, + detectOnDescendants = negativeControlSharedResource$negativeControlOutcomes$detectOnDescendants + )) + } + ) +) diff --git a/R/ModuleEnv.R b/R/ModuleEnv.R deleted file mode 100644 index 03139f36..00000000 --- a/R/ModuleEnv.R +++ /dev/null @@ -1,123 +0,0 @@ -# Copyright 2024 Observational Health Data Sciences and Informatics -# -# This file is part of Strategus -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -.handleInjectVar <- function(x) { - hVar <- function(x) { - if (is.character(x)) { - return(sprintf('"%s"', x)) - } else { - return(sprintf("%s", x)) - } - } - - if (length(x) == 1) { - return(hVar(x)) - } else if (is.vector(x)) { - innerVars <- hVar(x) - return(paste0("c(", paste(innerVars, collapse = ", "), ")")) - } else { - stop("cannot handle complex data structures in variable injection") - } -} - -#' Load module execution space inside and renv -#' inspired by targets::tar_script but allowing custom variable execution -#' -#' Designed to allow more human readable code that is executed inside a module as well as simple variable substitution -#' for injecting constants (e.g. 
# Render a value as R source text for injection into a generated script.
# Scalars become literals (character values are double-quoted); vectors
# become c(...) calls. Anything else is rejected.
.handleInjectVar <- function(x) {
  hVar <- function(x) {
    if (is.character(x)) {
      return(sprintf('"%s"', x))
    } else {
      return(sprintf("%s", x))
    }
  }

  if (length(x) == 1) {
    return(hVar(x))
  } else if (is.vector(x)) {
    innerVars <- hVar(x)
    return(paste0("c(", paste(innerVars, collapse = ", "), ")"))
  } else {
    stop("cannot handle complex data structures in variable injection")
  }
}

#' Load module execution space inside an renv
#' inspired by targets::tar_script but allowing custom variable execution
#'
#' Designed to allow more human readable code that is executed inside a module
#' as well as simple variable substitution for injecting constants (e.g.
#' simple parameters or file paths used inside and outside of modules)
#'
#' This pattern also allows dependency injection which could be used if you
#' don't want to use an renv and (instead) would like to use docker images or
#' just execution in the base environment for testing/debugging
#'
#' @param code code block to execute
#' @param moduleFolder Instantiated Strategus module folder
#' @param injectVars list of var names list(name=value) to replace (e.g.
#'        list(foo = "some string") will find the pattern foo and replace it
#'        with the string "some string" - be careful!
#' @param tempScriptFile tempFile to write script to
#' @param job run as rstudio job
#' @param processName String name for process
#' @returns NULL invisibly
withModuleRenv <- function(code,
                           moduleFolder,
                           injectVars = list(),
                           tempScriptFile = tempfile(fileext = ".R"),
                           job = FALSE,
                           # paste0: the original paste() inserted a stray
                           # space into the process name.
                           processName = paste0(moduleFolder, "_renv_run")) {
  # convert human readable code to a string for writing
  script <- as.character(substitute(code))[-1]
  # Insert variables. NOTE(review): gsub treats each name as a regex with no
  # word boundaries, so choose injection names that cannot collide with other
  # tokens in the code block.
  for (name in names(injectVars)) {
    rep <- .handleInjectVar(injectVars[[name]])
    script <- gsub(name, rep, script)
  }

  # Attach renv options() from the calling environment to the renv::run context
  # renv options are prefixed with "renv." as described in
  # https://rstudio.github.io/renv/reference/config.html
  envOptions <- options()
  renvOptions <- envOptions[grepl("renv\\.", names(envOptions))]
  if (length(renvOptions) > 0) {
    for (i in seq_along(renvOptions)) {
      script <- c(.copyOptionForScript(
        optionName = names(renvOptions)[[i]],
        optionValue = renvOptions[[i]]
      ), script)
    }
  }

  # Turning off verbose output to hide renv output unless the user has set
  # this option to TRUE. Restore the previous value on exit so the caller's
  # global options are not permanently mutated.
  if (!getOption(x = "renv.verbose", default = FALSE)) {
    oldOptions <- options(renv.verbose = FALSE)
    on.exit(options(oldOptions), add = TRUE)
  }

  # Import the Strategus functions we need to use in the module scripts
  script <- c("retrieveConnectionDetails <- ", base::deparse(Strategus::retrieveConnectionDetails), script)
  script <- c("unlockKeyring <- ", base::deparse(Strategus::unlockKeyring), script)

  # Write file and execute script inside an renv
  fileConn <- file(tempScriptFile)
  writeLines(script, fileConn)
  close(fileConn)
  renv::run(
    script = tempScriptFile,
    job = job,
    name = processName,
    project = moduleFolder
  )
  return(invisible(NULL))
}

# Build a library() call that loads a package from its current local location.
.getLocalLibraryScipt <- function(x) {
  libPath <- file.path(find.package(x), "../")
  sprintf("library(%s, lib.loc = '%s')", x, libPath)
}

# Serialize a single option() setting as a line of R source so it can be
# replayed inside the renv::run script. Functions cannot be serialized and
# are replaced with an explanatory comment.
.copyOptionForScript <- function(optionName, optionValue) {
  if (is.logical(optionValue) || is.numeric(optionValue)) {
    sprintf("options(%s = %s)", optionName, optionValue)
  } else if (is.character(optionValue) && length(optionValue) == 1) {
    sprintf("options(%s = '%s')", optionName, optionValue)
  } else if (is.character(optionValue) && length(optionValue) > 1) {
    sprintf("options(%s = c('%s'))", optionName, paste(optionValue, collapse = "','"))
  } else {
    paste0("# option = ", optionName, " - could not be passed to this file, likely because it is a function.")
  }
}

# Normalize a path and convert backslashes to forward slashes so the result
# is safe to embed in generated R source on any platform.
.formatAndNormalizeFilePathForScript <- function(filePath) {
  return(gsub("\\\\", "/", normalizePath(path = filePath, mustWork = FALSE)))
}
#' Ensure all modules are instantiated
#'
#' @description
#' Ensure that all modules referenced in the analysis specifications are instantiated
#' locally in the folder specified in the `INSTANTIATED_MODULES_FOLDER` environmental
#' variable.
#'
#' Missing modules will be fetched from remote repositories.
#'
#' This function will also check whether there are different versions of the same
#' module specified, which is not allowed, and whether all modules required by the
#' specified modules are also instantiated.
#'
#' @template AnalysisSpecifications
#'
#' @template forceVerification
#'
#' @template enforceModuleDependencies
#'
#' @return
#' A list containing the install status of all modules
#' (TRUE if all are installed properly) and a tibble listing
#' the instantiated modules.
#'
#' @export
ensureAllModulesInstantiated <- function(analysisSpecifications, forceVerification = FALSE, enforceModuleDependencies = TRUE) {
  modules <- getModuleTable(analysisSpecifications, distinct = TRUE)

  # Verify only one version per module:
  multipleVersionsPerModule <- modules %>%
    group_by(module) %>%
    summarise(versions = n()) %>%
    filter(versions > 1)
  if (nrow(multipleVersionsPerModule) > 0) {
    # Quote module names with backticks to match the sprintf template
    # (the original mixed backticks with single quotes).
    stop(sprintf(
      "Only one version per module allowed in a single analyses specification.\nMultiple versions found for module(s) `%s`.",
      paste(multipleVersionsPerModule$module, collapse = "`, `")
    ))
  }

  # Ensure all required modules are instantiated
  # (seq_len is safe when the specification contains no modules):
  for (i in seq_len(nrow(modules))) {
    ensureModuleInstantiated(
      module = modules$module[i],
      version = modules$version[i],
      remoteRepo = modules$remoteRepo[i],
      remoteUsername = modules$remoteUsername[i]
    )
  }

  # Check required dependencies have been declared in the specification
  # unless the user has set enforceModuleDependencies == FALSE
  checkModuleDependencies(
    modules = modules,
    enforceModuleDependencies = enforceModuleDependencies
  )

  # Verify all modules are properly installed
  moduleInstallStatus <- list()
  for (i in seq_len(nrow(modules))) {
    status <- verifyModuleInstallation(
      module = modules$module[i],
      version = modules$version[i],
      forceVerification = forceVerification
    )
    moduleInstallStatus[[length(moduleInstallStatus) + 1]] <- status
  }
  attr(modules, "moduleInstallStatus") <- moduleInstallStatus

  installStatus <- unlist(lapply(moduleInstallStatus, FUN = function(x) {
    x$moduleInstalled
  }))
  if (!all(installStatus)) {
    problemModules <- moduleInstallStatus[!installStatus]
    message("There were ", length(problemModules), " issue(s) found with your Strategus modules!")
    for (i in seq_along(problemModules)) {
      message("Issue #", i, ": Module ", problemModules[[i]]$moduleFolder, " could not install the following R packages:")
      print(problemModules[[i]]$issues)
    }
    message("To fix these issues, open the module project (.Rproj file) at the path specified above and re-run \"renv::restore()\" and correct all issues")
  }

  return(
    list(
      allModulesInstalled = all(installStatus),
      modules = modules
    )
  )
}
#' Verify a module is properly installed
#'
#' @description
#' In some instances a module may fail to instantiate and install due to problems
#' when calling renv::restore for the module's renv.lock file. This function
#' will surface inconsistencies between the module renv.lock file and the
#' module's renv project library. If a module is verified to work, the md5 hash
#' of the module's renv.lock file is written to moduleStatus.txt in the module
#' directory so subsequent calls can skip the (slow) verification. Use
#' `forceVerification` to re-run it.
#'
#' To fix issues with a module, open the module's .Rproj in RStudio and debug
#' the issues when calling renv::restore().
#'
#' @param module The name of the module to verify (i.e. "CohortGeneratorModule")
#'
#' @param version The version of the module to verify (i.e. "0.2.1")
#'
#' @param silent When TRUE output of this verification process is suppressed
#'
#' @template forceVerification
#'
#' @return
#' A list with the output of the consistency check
#'
#' @export
verifyModuleInstallation <- function(module, version, silent = FALSE, forceVerification = FALSE) {
  # Internal helper to shape the return value consistently.
  verifyModuleInstallationReturnValue <- function(moduleFolder, moduleInstalled, issues = NULL) {
    returnVal <- list(
      moduleFolder = moduleFolder,
      moduleInstalled = moduleInstalled,
      issues = issues
    )
    return(returnVal)
  }

  moduleFolder <- getModuleFolder(module, version)
  if (!dir.exists(moduleFolder)) {
    if (!silent) {
      warning("Module ", module, ", Version: ", version, " not found at: ", moduleFolder, ". This means the module was never installed.")
    }
    return(
      verifyModuleInstallationReturnValue(
        moduleFolder = moduleFolder,
        moduleInstalled = FALSE
      )
    )
  }

  if (!silent) {
    message("Verifying module: ", module, ", (", version, ") at ", moduleFolder, "...", appendLF = FALSE)
  }
  moduleStatusFileName <- "moduleStatus.txt"
  renvLockFileName <- "renv.lock"

  # If the lock file doesn't exist, we're not sure if we're dealing with a module.
  if (!file.exists(file.path(moduleFolder, renvLockFileName))) {
    if (!silent) {
      message("ERROR - renv.lock file missing.")
    }
    return(
      verifyModuleInstallationReturnValue(
        moduleFolder = moduleFolder,
        moduleInstalled = FALSE
      )
    )
  }

  # Check whether verification already ran: moduleStatus.txt stores the md5
  # of the renv.lock file at the time of the last successful verification.
  lockfileContents <- ParallelLogger::loadSettingsFromJson(
    fileName = file.path(moduleFolder, renvLockFileName)
  )
  lockfileHash <- digest::digest(
    object = lockfileContents,
    algo = "md5"
  )
  if (!forceVerification && file.exists(file.path(moduleFolder, moduleStatusFileName))) {
    lockfileHashFromModuleStatusFile <- SqlRender::readSql(
      sourceFile = file.path(moduleFolder, moduleStatusFileName)
    )

    # If the values match, the module is installed correctly - return and exit
    if (lockfileHashFromModuleStatusFile == lockfileHash) {
      if (!silent) {
        message("MODULE READY!")
      }
      return(
        verifyModuleInstallationReturnValue(
          moduleFolder = moduleFolder,
          moduleInstalled = TRUE
        )
      )
    }
  }

  # Perform the consistency check to verify that renv::restore() executed
  # successfully. This must run in the module's own renv context.
  Strategus:::withModuleRenv(
    code = {
      # Get the renv project status and identify the packages used in the
      # project to determine if there were issues restoring from renv.lock.
      projectStatus <- renv::status()
      library <- names(projectStatus$library$Packages)
      lockfile <- names(projectStatus$lockfile$Packages)
      packages <- sort(union(renv::dependencies(quiet = TRUE)$Package, "renv"))
      packages <- sort(unique(c(library, lockfile, packages)))
      projectStatus$packages <- packages
      saveRDS(
        object = list(
          library = library,
          lockfile = lockfile,
          packages = packages
        ),
        file = "projectStatus.rds"
      )
    },
    moduleFolder = moduleFolder
  )

  # The module's project status was written to the file system; read it back
  # to determine the restoration status.
  projectStatus <- readRDS(file.path(moduleFolder, "projectStatus.rds"))

  library <- projectStatus$library
  lockfile <- projectStatus$lockfile
  packages <- projectStatus$packages

  # NOTE(review): `used` is computed as packages %in% packages and is
  # therefore always TRUE; kept as-is to preserve the original filter logic.
  packageStatus <- data.frame(
    package = packages,
    installed = packages %in% library,
    recorded = packages %in% lockfile,
    used = packages %in% packages
  )

  # A package is a problem when it is used & recorded but not installed.
  ok <- packageStatus$installed & (packageStatus$used == packageStatus$recorded)
  issues <- packageStatus[!ok, , drop = FALSE]
  missing <- !issues$installed
  issues$installed <- ifelse(issues$installed, "y", "n")
  issues$recorded <- ifelse(issues$recorded, "y", "n")
  issues$used <- ifelse(issues$used, "y", if (any(missing)) "?" else "n")
  issues <- issues[issues$installed == "n" & issues$recorded == "y" & issues$used == "y", ]

  moduleInstalled <- nrow(issues) == 0

  if (isTRUE(moduleInstalled)) {
    if (!silent) {
      message("MODULE READY!")
    }
    # Record the renv.lock md5 so future verifications are fast.
    SqlRender::writeSql(
      sql = lockfileHash,
      targetFile = file.path(moduleFolder, "moduleStatus.txt")
    )
  } else {
    if (!silent) {
      message("MODULE HAS ISSUES!")
    }
  }

  return(
    verifyModuleInstallationReturnValue(
      moduleFolder = moduleFolder,
      moduleInstalled = moduleInstalled,
      issues = issues
    )
  )
}


#' Install the latest release of a module
#'
#' @description
#' This function will call out to the OHDSI GitHub repo to find the latest
#' version of the module and attempt to install it. Only modules that are listed
#' in the `getModuleList()` function are allowed since it will have a known
#' GitHub location.
#'
#' @param moduleName The name of the module to install (i.e. "CohortGeneratorModule").
#' This parameter must match a value found in the `module` column of `getModuleList()`
#'
#' @return
#' None - this function is called for its side effects
#'
#' @export
installLatestModule <- function(moduleName) {
  assertModulesFolderSetting(x = Sys.getenv("INSTANTIATED_MODULES_FOLDER"))
  instantiatedModulesFolder <- Sys.getenv("INSTANTIATED_MODULES_FOLDER")
  # Verify that the user's GITHUB_PAT is set properly
  # otherwise we may hit a rate limit
  if (Sys.getenv("GITHUB_PAT") == "") {
    stop("You must set your GITHUB_PAT to use this function. Please use the function `usethis::create_github_token()` and try again after restarting your R session.")
  }
  moduleList <- getModuleList()
  if (isFALSE(moduleName %in% moduleList$module)) {
    # Fixed: the original referenced the undefined symbol `module` here.
    stop("Module: ", moduleName, " not found in the list from Strategus::getModuleList().")
  }
  moduleDetails <- moduleList %>%
    dplyr::filter(module == moduleName)
  urlTemplate <- "https://api.%s/repos/%s/%s/releases/latest"
  baseUrl <- sprintf(urlTemplate, moduleDetails$remoteRepo, moduleDetails$remoteUsername, moduleDetails$module)
  req <- httr2::request(base_url = baseUrl) |>
    httr2::req_headers(
      "Authorization" = paste0("Bearer ", Sys.getenv("GITHUB_PAT")),
      "X-GitHub-Api-Version" = "2022-11-28"
    )
  response <- httr2::req_perform(req)
  release <- jsonlite::fromJSON(httr2::resp_body_string(response))
  # Tags are of the form "v1.2.3"; strip the leading v for the folder name.
  version <- gsub("v", "", release$tag_name, ignore.case = TRUE)
  moduleFolder <- ensureModuleInstantiated(
    module = moduleDetails$module,
    version = version,
    remoteRepo = moduleDetails$remoteRepo,
    remoteUsername = moduleDetails$remoteUsername
  )
  rlang::inform(paste0("Installed ", moduleName, " to ", moduleFolder))
}

# Build a (module, dependsOn) tibble from each module's MetaData.json.
extractDependencies <- function(modules) {
  extractDependenciesSingleModule <- function(module) {
    moduleFolder <- getModuleFolder(module$module, module$version)
    metaData <- getModuleMetaData(moduleFolder)
    dependencies <- tibble(
      module = module$module,
      dependsOn = as.character(metaData$Dependencies)
    )
    return(dependencies)
  }
  dependencies <- lapply(split(modules, seq_len(nrow(modules))), extractDependenciesSingleModule) %>%
    bind_rows()
  return(dependencies)
}

# Stop with a descriptive message when a module's declared dependency is not
# part of the specification (unless enforcement is turned off).
checkModuleDependencies <- function(modules, enforceModuleDependencies) {
  dependencies <- extractDependencies(modules)
  missingDependencies <- dependencies %>%
    filter(!dependsOn %in% modules$module)
  if (nrow(missingDependencies) > 0 && enforceModuleDependencies) {
    message <- paste(
      c(
        "Detected missing dependencies:",
        sprintf("- Missing module '%s' required by module '%s'", missingDependencies$dependsOn, missingDependencies$module)
      ),
      collapse = "\n"
    )
    stop(message)
  }
}

# Flatten the moduleSpecifications list into a tibble of module coordinates.
getModuleTable <- function(analysisSpecifications, distinct = FALSE) {
  modules <- lapply(
    analysisSpecifications$moduleSpecifications,
    function(x) {
      tibble(
        module = x$module,
        version = x$version,
        remoteRepo = x$remoteRepo,
        remoteUsername = x$remoteUsername
      )
    }
  ) %>%
    bind_rows()
  if (distinct) {
    modules <- modules %>%
      distinct(module, version, .keep_all = TRUE)
  }
  return(modules)
}

# Load and return a module's MetaData.json; error if it is absent.
getModuleMetaData <- function(moduleFolder) {
  jsonFileName <- file.path(moduleFolder, "MetaData.json")
  if (!file.exists(jsonFileName)) {
    stop(sprintf("Meta-data JSON not found in '%s'.", moduleFolder))
  }
  metaData <- ParallelLogger::loadSettingsFromJson(jsonFileName)
  return(metaData)
}

# Compute the on-disk folder for a module+version under
# INSTANTIATED_MODULES_FOLDER (the folder is not created here).
getModuleFolder <- function(module, version) {
  assertModulesFolderSetting(x = Sys.getenv("INSTANTIATED_MODULES_FOLDER"))
  moduleFolder <- file.path(Sys.getenv("INSTANTIATED_MODULES_FOLDER"), sprintf("%s_%s", module, version))
  invisible(moduleFolder)
}

# Instantiate the module if its folder does not already exist; returns the
# module folder path.
ensureModuleInstantiated <- function(module, version, remoteRepo, remoteUsername) {
  assertModulesFolderSetting(x = Sys.getenv("INSTANTIATED_MODULES_FOLDER"))
  instantiatedModulesFolder <- Sys.getenv("INSTANTIATED_MODULES_FOLDER")
  if (!dir.exists(instantiatedModulesFolder)) {
    dir.create(instantiatedModulesFolder, recursive = TRUE)
  }
  moduleFolder <- getModuleFolder(module, version)
  if (!dir.exists(moduleFolder)) {
    instantiateModule(module, version, remoteRepo, remoteUsername, moduleFolder)
  }
  return(moduleFolder)
}

# Download (or copy, for the test module), unzip, validate, and renv::restore
# a module into moduleFolder. The folder is removed again on any failure.
instantiateModule <- function(module, version, remoteRepo, remoteUsername, moduleFolder) {
  dir.create(moduleFolder)
  success <- FALSE
  on.exit(if (!success) unlink(moduleFolder, recursive = TRUE))
  moduleFile <- file.path(moduleFolder, sprintf("%s_%s.zip", module, version))
  if (module == "TestModule1") {
    # For unit testing purposes only: get module from inst/testdata folder
    file.copy(
      from = system.file(
        file.path("testdata", basename(moduleFile)),
        package = utils::packageName()
      ),
      to = moduleFolder
    )
  } else {
    moduleUrl <- sprintf("https://%s/%s/%s/archive/refs/tags/v%s.zip", remoteRepo, remoteUsername, module, version)
    utils::download.file(url = moduleUrl, destfile = moduleFile)
  }
  utils::unzip(zipfile = moduleFile, exdir = moduleFolder)
  unlink(moduleFile)
  # The unzipped content will likely live in a sub folder (GitHub archives
  # wrap the repo in "<repo>-<tag>/"); move everything up one level.
  subFolders <- list.dirs(path = moduleFolder, recursive = FALSE)
  if (length(subFolders) > 0) {
    for (i in seq_along(subFolders)) {
      R.utils::copyDirectory(
        from = subFolders[i],
        to = moduleFolder,
        recursive = TRUE
      )
      unlink(subFolders[i], recursive = TRUE)
    }
  }

  # Verify the module contains the files renv requires before restoring from
  # the renv.lock file; a malformed module could corrupt the R library.
  renvDependencies <- getModuleRenvDependencies(moduleFolder)
  if (nrow(renvDependencies) > 0) {
    message <- paste(
      c(
        sprintf("The module '%s' (v%s) is missing the following files required by renv:", module, version),
        sprintf("- Missing renv dependency '%s'", renvDependencies$fileName),
        "As a result, Strategus cannot use this module as part of the execution pipeline otherwise it may corrupt your R library.",
        "Please check to see if a newer version of this module exists and update your analysis specification to use that module instead."
      ),
      collapse = "\n"
    )
    stop(message)
  }

  withModuleRenv(
    code = {
      renv::restore(prompt = FALSE)
    },
    moduleFolder = moduleFolder,
    injectVars = list(moduleFolder = moduleFolder)
  )
  success <- TRUE
}

# Return a tibble of renv-required files that are missing from the module.
getModuleRenvDependencies <- function(moduleFolder) {
  renvRequiredFiles <- c(
    ".Rprofile",
    "renv.lock",
    "renv/activate.R",
    "renv/settings.json"
  )

  missingFiles <- tibble::enframe(renvRequiredFiles) %>%
    dplyr::mutate(fileExists = file.exists(file.path(moduleFolder, value))) %>%
    dplyr::rename(fileName = value) %>%
    dplyr::select("fileName", "fileExists") %>%
    dplyr::filter(!fileExists)

  invisible(missingFiles)
}

# Collect each module's results table prefix from its MetaData.json.
getModuleTablePrefixes <- function(moduleList) {
  moduleTablePrefix <- tibble::tibble()
  for (i in seq_len(nrow(moduleList))) {
    moduleMetaData <- getModuleMetaData(
      moduleFolder = getModuleFolder(
        module = moduleList$module[i],
        version = moduleList$version[i]
      )
    )
    moduleTablePrefix <- moduleTablePrefix %>%
      bind_rows(tibble::tibble(
        moduleName = moduleList$module[i],
        moduleVersion = moduleList$version[i],
        tablePrefix = moduleMetaData$TablePrefix
      ))
  }

  invisible(moduleTablePrefix)
}
-#' -#' @param filename1 The first renv.lock file name -#' -#' @param filename2 The second renv.lock file name -#' -#' @return -#' A data.frame with the comparison of the rev.lock files -#' -#' @export -compareLockFiles <- function(filename1, filename2) { - # Read the lock files - lockfile1 <- renv::lockfile_read( - file = filename1 - ) - - lockfile2 <- renv::lockfile_read( - file = filename2 - ) - - # Compare lock files - lockfile1Packages <- lockFileToDataFrame(lockfile1) - names(lockfile1Packages) <- paste0("lockfile1", names(lockfile1Packages)) - lockfile2Packages <- lockFileToDataFrame(lockfile2) - names(lockfile2Packages) <- paste0("lockfile2", names(lockfile2Packages)) - mergedLockFilePackages <- merge( - x = lockfile1Packages, - y = lockfile2Packages, - by.x = "lockfile1Name", - by.y = "lockfile2Name", - all = TRUE - ) - return(mergedLockFilePackages) -} - -#' Synchronize renv.lock files and overwrite the target file -#' (read the description) -#' -#' @description -#' Used to synchronize the values from the "source of truth" renv.lock file to -#' the target renv.lock file. Packages are compared (by name) and if the version -#' of the package in the "source of truth" is greater the one found in the -#' target, the target renv.lock file will be updated. This function will -#' automatically update the target file. -#' -#' Version comparison is handled by the `semver` package and since most packages -#' use semantic versioning. When a package does not use semantic versioning, -#' a warning is provided so the user can review. 
-#' -#' @param sourceOfTruthLockFileName The renv.lock file to use as the source of -#' truth -#' -#' @param targetLockFileName The target renv.lock file that will be synced with -#' the source of truth -#' -#' @return -#' A data.frame containing the different packages and their version that -#' were involved in the synchronization process -#' -#' @export -syncLockFile <- function(sourceOfTruthLockFileName, targetLockFileName) { - findPackageByName <- function(list, packageName) { - index <- which(sapply(list, function(x) x$Package == packageName)) - return(index) - } - - # Read the lock files - sourceOfTruthLockFile <- renv::lockfile_read( - file = sourceOfTruthLockFileName - ) - targetLockFile <- renv::lockfile_read( - file = targetLockFileName - ) - - # Compare the lock files to get the differences in package versions - comparedLockFiles <- compareLockFiles( - filename1 = sourceOfTruthLockFileName, - filename2 = targetLockFileName - ) - verDiffs <- comparedLockFiles[!is.na(comparedLockFiles$lockfile2Version) & - comparedLockFiles$lockfile1Version != comparedLockFiles$lockfile2Version, ] - verDiffs <- verDiffs[!is.na(verDiffs$lockfile1Name), ] - - if (nrow(verDiffs) == 0) { - rlang::inform("Lock files are already in sync.") - return(invisible(NULL)) - } - - # Update the target lock file based on the source of truth - for (i in 1:nrow(verDiffs)) { - index <- findPackageByName(targetLockFile$Packages, verDiffs[i, ]$lockfile1Name) - tryCatch(expr = { - semverPattern <- "^\\d+\\.\\d+\\.\\d+(?:-[0-9A-Za-z-]+(?:\\.[0-9A-Za-z-]+)*)?(?:\\+[0-9A-Za-z-]+)?$" - sourceOfTruthVersion <- verDiffs[i, ]$lockfile1Version - targetVersion <- targetLockFile$Packages[[index]]$Version - if (grepl(semverPattern, sourceOfTruthVersion) && grepl(semverPattern, targetVersion)) { - sourceOfTruthVersion <- semver::parse_version(sourceOfTruthVersion) - targetVersion <- semver::parse_version(targetVersion) - if (sourceOfTruthVersion > targetVersion) { - rlang::inform( - message = 
paste(verDiffs[i, ]$lockfile1Name, "[", targetVersion, "->", sourceOfTruthVersion, "]") - ) - targetLockFile$Packages[[index]]$Version <- verDiffs[i, ]$lockfile1Version - if (!is.na(verDiffs[i, ]$lockfile1RemoteRef)) { - targetLockFile$Packages[[index]]$RemoteRef <- verDiffs[i, ]$lockfile1RemoteRef - } - } else { - rlang::inform( - message = paste(verDiffs[i, ]$lockfile1Name, "[ SKIPPING - ", targetVersion, ">", sourceOfTruthVersion, "]") - ) - } - } else { - rlang::warn(paste0("Package: [", verDiffs[i, ]$lockfile1Name, "] - version number could not be parsed. Please inspect manually as it may require an upgrade.")) - } - }, error = function(err) { - rlang::inform("An error occurred:", utils::str(err), "\n") - }) - } - - # Save the updated lock file - renv::lockfile_write( - lockfile = targetLockFile, - file = targetLockFileName - ) - - return(invisible(verDiffs)) -} - -#' Validate an renv.lock file to ensure it is ready for use by Strategus -#' -#' @description -#' Will check an renv.lock file for a module to verify that it only references -#' tagged packages and includes the packages required by Strategus. It will -#' also check for suggested packages that are useful for testing, such as -#' RSQLite. 
-#' -#' @param filename The renv.lock file to validate -#' -#' @export -validateLockFile <- function(filename) { - # Read the lock file - lockFile <- renv::lockfile_read( - file = filename - ) - # Create a data.frame from the renv.lock file - df <- lockFileToDataFrame( - lf = lockFile - ) - - # Check that the mandatory dependencies are met - message("Checking mandatory packages...", appendLF = F) - if (length(mandatoryPackages()) != nrow(df[df$Name %in% mandatoryPackages(), ])) { - missingPkgs <- setdiff(mandatoryPackages(), df[df$Name %in% mandatoryPackages(), ]$Name) - message("FAILED!") - message(" -- Missing the mandatory packages: ", paste(missingPkgs, collapse = ", ")) - message(" Please record these missing dependencies in your renv.lock file.") - } else { - message("PASSED!") - } - - # Check for suggested packages - message("Checking suggested packages...", appendLF = F) - if (length(suggestedPacakges()) != nrow(df[df$Name %in% suggestedPacakges(), ])) { - missingPkgs <- setdiff(suggestedPacakges(), df[df$Name %in% suggestedPacakges(), ]$Name) - message("WARNING!") - message(" -- Missing the suggested packages: ", paste(missingPkgs, collapse = ", ")) - message(" This is an optional set of dependencies so you may decide if you wish to have them in your renv.lock file.") - } else { - message("PASSED!") - } - - # Check that we're using declared versions of all packages - message("Checking all package using tagged versions in RemoteRef...", appendLF = F) - # Start by filtering out the CRAN Repository entries - dfFiltered <- df[tolower(df$Source) != "repository", ] - if (!all(grepl("^(v)?\\d+(\\.\\d+){2}$", dfFiltered$RemoteRef))) { - message("FAILED! 
Please check the following packages:") - problemPkgs <- dfFiltered[!grepl("^(v)?\\d+(\\.\\d+){2}$", dfFiltered$RemoteRef), ] - for (i in 1:nrow(problemPkgs)) { - message(paste0(" -- Package: ", problemPkgs$Name[[i]], "; RemoteRef: ", problemPkgs$RemoteRef[[i]])) - } - message(" Please ensure you are only including tagged versions of package dependencies in your renv.lock file.") - } else { - message("PASSED!") - } -} - -#' List of mandatory packages for a Strategus module -#' -#' @keywords internal -mandatoryPackages <- function() { - return(c( - "CohortGenerator", - "DatabaseConnector", - "keyring", - "ParallelLogger", - "renv", - "SqlRender" - )) -} - -#' List of suggested packages for a Strategus module -#' -#' @keywords internal -suggestedPacakges <- function() { - return(c("RSQLite")) -} - - -#' Convert a lock file to a data.frame -#' -#' @keywords internal -lockFileToDataFrame <- function(lf) { - df <- data.frame() - for (i in 1:length(lf$Packages)) { - df <- rbind( - df, - data.frame( - Name = lf$Packages[[i]]$Package, - Version = lf$Packages[[i]]$Version, - Source = lf$Packages[[i]]$Source, - RemoteRef = ifelse(is.null(lf$Packages[[i]]$RemoteRef), yes = NA, no = lf$Packages[[i]]$RemoteRef) - ) - ) - } - return(df) -} diff --git a/R/ResultDataModel.R b/R/ResultDataModel.R new file mode 100644 index 00000000..60714b2b --- /dev/null +++ b/R/ResultDataModel.R @@ -0,0 +1,91 @@ +# Copyright 2024 Observational Health Data Sciences and Informatics +# +# This file is part of Strategus +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#' Create Result Data Model
+#'
+#' @description
+#' Use this at the study design stage to create data models for modules.
+#' This function loads modules and executes any custom code to create
+#' the results data model in the specified schema in the results database.
+#'
+#' @template AnalysisSpecifications
+#' @param resultsDataModelSettings The results data model settings as created using [createResultsDataModelSettings()]
+#' @template resultsConnectionDetails
+#'
+#' @export
+createResultDataModel <- function(analysisSpecifications,
+                                  resultsDataModelSettings,
+                                  resultsConnectionDetails) {
+  errorMessages <- checkmate::makeAssertCollection()
+  checkmate::assertClass(analysisSpecifications, "AnalysisSpecifications", add = errorMessages)
+  checkmate::assertClass(resultsDataModelSettings, "ResultsDataModelSettings", add = errorMessages)
+  checkmate::assertClass(resultsConnectionDetails, "ConnectionDetails", add = errorMessages)
+  checkmate::reportAssertions(collection = errorMessages)
+
+  # The DatabaseMetaData is a special case...
+ .createDatabaseMetadataResultsDataModel( + resultsConnectionDetails = resultsConnectionDetails, + resultsDataModelSettings = resultsDataModelSettings + ) + + for (i in 1:length(analysisSpecifications$moduleSpecifications)) { + moduleName <- analysisSpecifications$moduleSpecifications[[i]]$module + moduleObj <- get(moduleName)$new() + moduleObj$createResultsDataModel( + resultsConnectionDetails = resultsConnectionDetails, + resultsDatabaseSchema = resultsDataModelSettings$resultsDatabaseSchema + ) + } +} + +#' Upload results +#' +#' @description +#' +#' Upload the results for a given analysis +#' +#' @template AnalysisSpecifications +#' @template resultsDataModelSettings +#' @template resultsConnectionDetails +#' +#' @export +uploadResults <- function(analysisSpecifications, + resultsDataModelSettings, + resultsConnectionDetails) { + errorMessages <- checkmate::makeAssertCollection() + checkmate::assertClass(analysisSpecifications, "AnalysisSpecifications", add = errorMessages) + checkmate::assertClass(resultsDataModelSettings, "ResultsDataModelSettings", add = errorMessages) + checkmate::assertClass(resultsConnectionDetails, "ConnectionDetails", add = errorMessages) + checkmate::reportAssertions(collection = errorMessages) + + # The DatabaseMetaData is a special case... 
+ .uploadDatabaseMetadata( + resultsConnectionDetails = resultsConnectionDetails, + resultsDataModelSettings = resultsDataModelSettings + ) + + for (i in 1:length(analysisSpecifications$moduleSpecifications)) { + moduleName <- analysisSpecifications$moduleSpecifications[[i]]$module + moduleObj <- get(moduleName)$new() + moduleObj$uploadResults( + resultsConnectionDetails = resultsConnectionDetails, + analysisSpecifications = analysisSpecifications, + resultsDataModelSettings = resultsDataModelSettings + ) + } +} + + diff --git a/R/ResultModelCreation.R b/R/ResultModelCreation.R deleted file mode 100644 index 5c13455a..00000000 --- a/R/ResultModelCreation.R +++ /dev/null @@ -1,295 +0,0 @@ -# Copyright 2024 Observational Health Data Sciences and Informatics -# -# This file is part of Strategus -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -#' Create Result Data Models -#' @description -#' Use this at the study design stage to create data models for modules -#' This functions loads modules and executes any custom code to create schemas in a results database -#' If recreate is set to TRUE all existing data will be removed, otherwise -#' -#' @inheritParams execute -#' -#' @export -createResultDataModels <- function(analysisSpecifications, - executionSettings, - executionScriptFolder = NULL, - keyringName = NULL, - restart = FALSE, - enforceModuleDependencies = TRUE) { - errorMessages <- checkmate::makeAssertCollection() - keyringList <- keyring::keyring_list() - checkmate::assertClass(analysisSpecifications, "AnalysisSpecifications", add = errorMessages) - checkmate::assertClass(executionSettings, "ResultsExecutionSettings", add = errorMessages) - checkmate::assertChoice(x = keyringName, choices = keyringList$keyring, null.ok = TRUE, add = errorMessages) - checkmate::reportAssertions(collection = errorMessages) - - modules <- ensureAllModulesInstantiated( - analysisSpecifications = analysisSpecifications, - enforceModuleDependencies = enforceModuleDependencies - ) - - if (isFALSE(modules$allModulesInstalled)) { - stop("Stopping execution due to module issues") - } - - - if (is.null(executionScriptFolder)) { - executionScriptFolder <- tempfile("strategusTempSettings") - dir.create(executionScriptFolder) - on.exit(unlink(executionScriptFolder, recursive = TRUE)) - } else if (!restart) { - if (dir.exists(executionScriptFolder)) { - unlink(executionScriptFolder, recursive = TRUE) - } - dir.create(executionScriptFolder, recursive = TRUE) - } - # Normalize path to convert from relative to absolute path - executionScriptFolder <- normalizePath(executionScriptFolder, mustWork = F) - - script <- file.path(executionScriptFolder, "SchemaScript.R") - ## - # Code execution inside targets block - ## - targets::tar_script( - { - ## - # Generated by Strategus - not advisable to edit by hand - ## - 
analysisSpecificationsLoad <- readRDS(analysisSpecificationsFileName) - - targets::tar_option_set(packages = c("Strategus", "keyring"), imports = c("Strategus", "keyring")) - targetList <- list( - targets::tar_target(analysisSpecifications, readRDS(analysisSpecificationsFileName)), - targets::tar_target(executionSettings, readRDS(executionSettingsFileName)), - targets::tar_target(keyringSettings, readRDS(keyringSettingsFileName)) - ) - - for (i in 1:length(analysisSpecificationsLoad$moduleSpecifications)) { - moduleSpecification <- analysisSpecificationsLoad$moduleSpecifications[[i]] - targetName <- sprintf("%s_%d_schema_creation", moduleSpecification$module, i) - - # Use of tar_target_raw allows dynamic names - targetList[[length(targetList) + 1]] <- targets::tar_target_raw(targetName, - substitute(Strategus:::runSchemaCreation(analysisSpecifications, keyringSettings, i, executionSettings), - env = list(i = i) - ), - deps = c("analysisSpecifications", "keyringSettings", "executionSettings") - ) - } - targetList - }, - script = script - ) - - # Store settings objects in the temp folder so they are available in targets - analysisSpecificationsFileName <- .formatAndNormalizeFilePathForScript(file.path(executionScriptFolder, "analysisSpecifications.rds")) - saveRDS(analysisSpecifications, analysisSpecificationsFileName) - executionSettingsFileName <- .formatAndNormalizeFilePathForScript(file.path(executionScriptFolder, "executionSettings.rds")) - saveRDS(executionSettings, executionSettingsFileName) - keyringSettingsFileName <- .formatAndNormalizeFilePathForScript(file.path(executionScriptFolder, "keyringSettings.rds")) - saveRDS(list(keyringName = keyringName), keyringSettingsFileName) - - # Generate target names by module type - moduleToTargetNames <- list() - for (i in 1:length(analysisSpecifications$moduleSpecifications)) { - moduleSpecification <- analysisSpecifications$moduleSpecifications[[i]] - targetName <- sprintf("%s_%d", moduleSpecification$module, i) - 
moduleToTargetNames[[length(moduleToTargetNames) + 1]] <- tibble( - module = moduleSpecification$module, - targetName = targetName - ) - } - moduleToTargetNames <- bind_rows(moduleToTargetNames) - moduleToTargetNamesFileName <- .formatAndNormalizeFilePathForScript(file.path(executionScriptFolder, "moduleTargetNames.rds")) - saveRDS(moduleToTargetNames, moduleToTargetNamesFileName) - - # Settings required inside script. There is probably a much cleaner way of doing this - writeLines(c( - sprintf("analysisSpecificationsFileName <- '%s'", analysisSpecificationsFileName), - sprintf("executionSettingsFileName <- '%s'", executionSettingsFileName), - sprintf("keyringSettingsFileName <- '%s'", keyringSettingsFileName), - sprintf("moduleToTargetNamesFileName <- '%s'", moduleToTargetNamesFileName), - readLines(script) - ), script) - - targets::tar_make(script = script) -} - - -#' Create module(s) result data model -#' @description -#' This function will create the results data model for the modules in the -#' `analysisSpecifications`. A module can implement its own results data model -#' creation function by implementing the function `createDataModelSchema` in -#' its Main.R. The default behavior is to use the `ResultsModelManager` to create -#' the results data model based on the `resultsDataModelSpecification.csv` in the -#' module's results folder. -#' -#' @template AnalysisSpecifications -#' @param keyringSettings The keyringSettings from the executionSettings context -#' @param moduleIndex The index of the module in the analysis specification -#' @template executionSettings -#' @param ... For future expansion -runSchemaCreation <- function(analysisSpecifications, keyringSettings, moduleIndex, executionSettings, ...) 
{ - checkmate::assert_multi_class(x = executionSettings, classes = c("ResultsExecutionSettings")) - moduleSpecification <- analysisSpecifications$moduleSpecifications[[moduleIndex]] - - module <- moduleSpecification$module - version <- moduleSpecification$version - remoteRepo <- moduleSpecification$remoteRepo - remoteUsername <- moduleSpecification$remoteUsername - moduleInstallation <- verifyModuleInstallation(module, version, silent = TRUE) - moduleFolder <- moduleInstallation$moduleFolder - if (isFALSE(moduleInstallation$moduleInstalled)) { - stop("Stopping since module is not properly installed!") - } - - # Create job context - moduleExecutionSettings <- executionSettings - moduleExecutionSettings$workSubFolder <- file.path(executionSettings$workFolder, sprintf("%s_%d", module, moduleIndex)) - moduleExecutionSettings$resultsSubFolder <- file.path(executionSettings$resultsFolder, sprintf("%s_%d", module, moduleIndex)) - - if (!dir.exists(moduleExecutionSettings$workSubFolder)) { - dir.create(moduleExecutionSettings$workSubFolder, recursive = TRUE) - } - if (!dir.exists(moduleExecutionSettings$resultsSubFolder)) { - dir.create(moduleExecutionSettings$resultsSubFolder, recursive = TRUE) - } - - jobContext <- list( - sharedResources = analysisSpecifications$sharedResources, - settings = moduleSpecification$settings, - moduleExecutionSettings = moduleExecutionSettings, - keyringSettings = keyringSettings - ) - jobContextFileName <- .formatAndNormalizeFilePathForScript(file.path(moduleExecutionSettings$workSubFolder, "jobContext.rds")) - saveRDS(jobContext, jobContextFileName) - dataModelExportPath <- .formatAndNormalizeFilePathForScript(file.path(moduleExecutionSettings$workSubFolder, "resultsDataModelSpecification.csv")) - - - doneFile <- .formatAndNormalizeFilePathForScript(file.path(jobContext$moduleExecutionSettings$resultsSubFolder, "schema.creation")) - if (file.exists(doneFile)) { - unlink(doneFile) - } - ## code executed inside renv only has scoped variables 
- withModuleRenv( - code = { - createDataModelSchema <- NULL - - getDataModelSpecifications <- function(...) { - if (file.exists("resultsDataModelSpecification.csv")) { - res <- CohortGenerator::readCsv( - file = "resultsDataModelSpecification.csv" - ) - return(res) - } - return(NULL) - } - - source("Main.R") - - jobContext <- readRDS(jobContextFileName) - specifications <- getDataModelSpecifications(jobContext) - - if (Sys.getenv("FORCE_RENV_USE", "") == "TRUE") { - renv::use(lockfile = "renv.lock") - } - - ParallelLogger::addDefaultFileLogger(jobContext$moduleExecutionSettings$logFileName) - ParallelLogger::addDefaultFileLogger(file.path(jobContext$moduleExecutionSettings$resultsSubFolder, "log.txt")) - ParallelLogger::addDefaultErrorReportLogger(file.path(jobContext$moduleExecutionSettings$resultsSubFolder, "errorReportR.txt")) - - message("START SCHEMA CREATION: ", moduleName) - # Main.R can override default behaviour by implementing this function - if (is.function(createDataModelSchema)) { - # If the keyring is locked, unlock it, set the value and then re-lock it - keyringName <- jobContext$keyringSettings$keyringName - keyringLocked <- unlockKeyring(keyringName = keyringName) - - resultsConnectionDetails <- keyring::key_get(jobContext$moduleExecutionSettings$resultsConnectionDetailsReference, keyring = keyringName) - resultsConnectionDetails <- ParallelLogger::convertJsonToSettings(resultsConnectionDetails) - resultsConnectionDetails <- do.call(DatabaseConnector::createConnectionDetails, resultsConnectionDetails) - jobContext$moduleExecutionSettings$resultsConnectionDetails <- resultsConnectionDetails - - createDataModelSchema(jobContext) - - if (keyringLocked) { - keyring::keyring_lock(keyring = keyringName) - } - writeLines("schema.created", doneFile) - } else if (is.data.frame(specifications)) { - # Export schema to readable location - CohortGenerator::writeCsv( - x = specifications, - file = dataModelExportPath, - warnOnCaseMismatch = FALSE - ) - 
writeLines("specifications.written", doneFile) - } else { - warning("Module does not include data specifications file or createDataModelSchema function") - CohortGenerator::writeCsv( - x = specifications, - file = dataModelExportPath, - warnOnCaseMismatch = FALSE - ) - writeLines("specifications.not.written", doneFile) - } - message("FINISH SCHEMA CREATION: ", moduleName) - - ParallelLogger::unregisterLogger("DEFAULT_FILE_LOGGER", silent = TRUE) - ParallelLogger::unregisterLogger("DEFAULT_ERRORREPORT_LOGGER", silent = TRUE) - }, - moduleFolder = moduleFolder, - tempScriptFile = file.path(moduleExecutionSettings$workSubFolder, "SchemaCreation.R"), - injectVars = list( - jobContextFileName = jobContextFileName, - dataModelExportPath = dataModelExportPath, - moduleName = module, - doneFile = doneFile - ) - ) - - workStatus <- readLines(doneFile) - if (workStatus == "specifications.written") { - moduleInfo <- ParallelLogger::loadSettingsFromJson(file.path(moduleFolder, "MetaData.json")) - keyringName <- jobContext$keyringSettings$keyringName - keyringLocked <- Strategus::unlockKeyring(keyringName = keyringName) - - resultsConnectionDetails <- keyring::key_get(jobContext$moduleExecutionSettings$resultsConnectionDetailsReference, keyring = keyringName) - resultsConnectionDetails <- ParallelLogger::convertJsonToSettings(resultsConnectionDetails) - resultsConnectionDetails <- do.call(DatabaseConnector::createConnectionDetails, resultsConnectionDetails) - - connection <- DatabaseConnector::connect(resultsConnectionDetails) - on.exit(DatabaseConnector::disconnect(connection)) - - sql <- ResultModelManager::generateSqlSchema(csvFilepath = dataModelExportPath) - DatabaseConnector::renderTranslateExecuteSql(connection, - sql, - table_prefix = moduleInfo$TablePrefix, - database_schema = jobContext$moduleExecutionSettings$resultsDatabaseSchema - ) - - if (keyringLocked) { - keyring::keyring_lock(keyring = keyringName) - } - } else if (workStatus == "schema.created") { - 
message("Result schema creation handled inside module execution envrionment") - } else { - message("Schema not created for module") - } - - return(list(dummy = 123)) -} diff --git a/R/ResultsUpload.R b/R/ResultsUpload.R deleted file mode 100644 index ece4ec07..00000000 --- a/R/ResultsUpload.R +++ /dev/null @@ -1,196 +0,0 @@ -# Copyright 2024 Observational Health Data Sciences and Informatics -# -# This file is part of Strategus -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -# Results upload callbacks for inserting results in to a database -runResultsUpload <- function(analysisSpecifications, keyringSettings, moduleIndex, executionSettings, ...) 
{ - checkmate::assert_multi_class(x = executionSettings, classes = c("ExecutionSettings")) - moduleSpecification <- analysisSpecifications$moduleSpecifications[[moduleIndex]] - - module <- moduleSpecification$module - version <- moduleSpecification$version - remoteRepo <- moduleSpecification$remoteRepo - remoteUsername <- moduleSpecification$remoteUsername - moduleInstallation <- verifyModuleInstallation(module, version, silent = TRUE) - moduleFolder <- moduleInstallation$moduleFolder - if (isFALSE(moduleInstallation$moduleInstalled)) { - stop("Stopping since module is not properly installed!") - } - - # Create job context - moduleExecutionSettings <- executionSettings - moduleExecutionSettings$workSubFolder <- normalizePath(file.path(executionSettings$workFolder, sprintf("%s_%d", module, moduleIndex)), mustWork = F) - moduleExecutionSettings$resultsSubFolder <- normalizePath(file.path(executionSettings$resultsFolder, sprintf("%s_%d", module, moduleIndex)), mustWork = F) - - if (!is(executionSettings, "CdmExecutionSettings")) { - stop("Unhandled executionSettings class! 
Must be CdmExecutionSettings instance") - } - - if (!dir.exists(moduleExecutionSettings$resultsSubFolder)) { - stop("results not found") - } - jobContext <- list( - sharedResources = analysisSpecifications$sharedResources, - settings = moduleSpecification$settings, - moduleExecutionSettings = moduleExecutionSettings, - keyringSettings = keyringSettings - ) - jobContextFileName <- .formatAndNormalizeFilePathForScript(file.path(moduleExecutionSettings$workSubFolder, "jobContext.rds")) - saveRDS(jobContext, jobContextFileName) - dataModelExportPath <- .formatAndNormalizeFilePathForScript(file.path(moduleExecutionSettings$workSubFolder, "resultsDataModelSpecification.csv")) - - doneFile <- .formatAndNormalizeFilePathForScript(file.path(jobContext$moduleExecutionSettings$resultsSubFolder, "results.uploaded")) - if (file.exists(doneFile)) { - unlink(doneFile) - } - - tempScriptFile <- file.path(moduleExecutionSettings$workSubFolder, "UploadScript.R") - ParallelLogger::addDefaultFileLogger(jobContext$moduleExecutionSettings$logFileName) - on.exit(ParallelLogger::unregisterLogger("DEFAULT_FILE_LOGGER", silent = TRUE)) - - ## - # Module space executed code - ## - withModuleRenv( - { - uploadResultsCallback <- NULL - - getDataModelSpecifications <- function(...) 
{ - ParallelLogger::logInfo("Getting result model specification") - rdmsFilePath <- file.path(jobContext$moduleExecutionSettings$resultsSubFolder, "resultsDataModelSpecification.csv") - if (file.exists(rdmsFilePath)) { - res <- CohortGenerator::readCsv( - file = rdmsFilePath - ) - return(res) - } - ParallelLogger::logInfo("No result model specification found") - return(NULL) - } - source("Main.R") - moduleInfo <- ParallelLogger::loadSettingsFromJson("MetaData.json") - jobContext <- readRDS(jobContextFileName) - specifications <- getDataModelSpecifications(jobContext) - - ParallelLogger::addDefaultFileLogger(jobContext$moduleExecutionSettings$logFileName) - ParallelLogger::addDefaultFileLogger(file.path(jobContext$moduleExecutionSettings$resultsSubFolder, "log.txt")) - ParallelLogger::addDefaultErrorReportLogger(file.path(jobContext$moduleExecutionSettings$resultsSubFolder, "errorReportR.txt")) - - if (Sys.getenv("FORCE_RENV_USE", "") == "TRUE") { - renv::use(lockfile = "renv.lock") - } - - message("START MODULE RESULTS UPLOAD: ", moduleName) - # Override default behaviour and do module specific upload inside module context? 
- if (is.function(uploadResultsCallback)) { - ParallelLogger::logInfo("Calling module result upload functionality") - # If the keyring is locked, unlock it, set the value and then re-lock it - ParallelLogger::logInfo("-- Getting result database credentials") - keyringName <- jobContext$keyringSettings$keyringName - keyringLocked <- unlockKeyring(keyringName = keyringName) - resultsConnectionDetails <- keyring::key_get(jobContext$moduleExecutionSettings$resultsConnectionDetailsReference, keyring = keyringName) - resultsConnectionDetails <- ParallelLogger::convertJsonToSettings(resultsConnectionDetails) - resultsConnectionDetails <- do.call(DatabaseConnector::createConnectionDetails, resultsConnectionDetails) - jobContext$moduleExecutionSettings$resultsConnectionDetails <- resultsConnectionDetails - ParallelLogger::logInfo("-- Executing upload callback") - uploadResultsCallback(jobContext) - if (keyringLocked) { - keyring::keyring_lock(keyring = keyringName) - } - ParallelLogger::logInfo("-- Upload completed") - writeLines("results.uploaded", doneFile) - } else if (is.null(specifications)) { - ParallelLogger::logInfo("No result specifications found, assuming module has produced no results") - # NO spect file Status - warning("data model specifications not loaded from module - skipping results upload") - writeLines("no.spec.found", doneFile) - } else { - # Spec file written - ParallelLogger::logInfo("Writing spec for result upload outside of module context") - CohortGenerator::writeCsv( - x = specifications, - file = dataModelExportPath, - warnOnFileNameCaseMismatch = FALSE - ) - writeLines("specifications.written", doneFile) - } - - message("FINISH MODULE RESULTS UPLOAD: ", moduleName) - ParallelLogger::unregisterLogger("DEFAULT_FILE_LOGGER", silent = TRUE) - ParallelLogger::unregisterLogger("DEFAULT_ERRORREPORT_LOGGER", silent = TRUE) - }, - moduleFolder = moduleFolder, - tempScriptFile = tempScriptFile, - injectVars = list( - jobContextFileName = 
jobContextFileName, - dataModelExportPath = dataModelExportPath, - moduleName = module, - doneFile = doneFile - ) - ) - ## - # end Module executed code - ## - if (!file.exists(doneFile)) { - message <- paste( - "Module did not complete. To debug:", - sprintf(" rstudioapi::openProject('%s', newSession = TRUE)", moduleFolder), - sprintf(" file.edit('%s')", tempScriptFile), - sep = "\n" - ) - stop(message) - } - - workStatus <- readLines(doneFile) - - if (workStatus == "specifications.written") { - message("Uploading results according to module specification") - specifications <- CohortGenerator::readCsv(dataModelExportPath) - - keyringName <- jobContext$keyringSettings$keyringName - keyringLocked <- Strategus::unlockKeyring(keyringName = keyringName) - - message("Getting result database credentials") - resultsConnectionDetails <- keyring::key_get(jobContext$moduleExecutionSettings$resultsConnectionDetailsReference, keyring = keyringName) - resultsConnectionDetails <- ParallelLogger::convertJsonToSettings(resultsConnectionDetails) - resultsConnectionDetails <- do.call(DatabaseConnector::createConnectionDetails, resultsConnectionDetails) - jobContext$moduleExecutionSettings$resultsConnectionDetails <- resultsConnectionDetails - - message("Calling RMM for upload") - ResultModelManager::uploadResults( - connectionDetails = jobContext$moduleExecutionSettings$resultsConnectionDetails, - schema = jobContext$moduleExecutionSettings$resultsDatabaseSchema, - resultsFolder = jobContext$moduleExecutionSettings$resultsSubFolder, - forceOverWriteOfSpecifications = FALSE, - purgeSiteDataBeforeUploading = FALSE, - databaseIdentifierFile = file.path(executionSettings$resultsFolder, "DatabaseMetaData", "database_meta_data.csv"), - runCheckAndFixCommands = FALSE, - warnOnMissingTable = TRUE, - specifications = specifications - ) - - message("Upload completed") - if (keyringLocked) { - keyring::keyring_lock(keyring = keyringName) - } - } else if (workStatus == "results.uploaded") { - 
message("Result upload handled inside module execution envrionment") - } else { - message("Results not uploaded for module") - } - - return(list(dummy = 123)) -} diff --git a/R/RunModule.R b/R/RunModule.R deleted file mode 100644 index 5aebcfa1..00000000 --- a/R/RunModule.R +++ /dev/null @@ -1,142 +0,0 @@ -# Copyright 2024 Observational Health Data Sciences and Informatics -# -# This file is part of Strategus -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Note: Using S3 for consistency with settings objects in PLP, CohortMethod, and -# FeatureExtraction. If we want to use S4 or R6 we need to first adapt those -# packages. This will be difficult, since these settings objects are used throughout -# these packages, and are for example used in do.call() calls. We should also -# carefully consider serialization and deserialization to JSON, which currently -# uses custom functionality in ParallelLogger to maintain object attributes. - -runModule <- function(analysisSpecifications, keyringSettings, moduleIndex, executionSettings, ...) 
{ - checkmate::assert_multi_class(x = executionSettings, classes = c("CdmExecutionSettings", "ResultsExecutionSettings")) - moduleSpecification <- analysisSpecifications$moduleSpecifications[[moduleIndex]] - module <- moduleSpecification$module - version <- moduleSpecification$version - remoteRepo <- moduleSpecification$remoteRepo - remoteUsername <- moduleSpecification$remoteUsername - moduleInstallation <- verifyModuleInstallation(module, version) - moduleFolder <- moduleInstallation$moduleFolder - if (isFALSE(moduleInstallation$moduleInstalled)) { - stop(paste0("Stopping since module is not properly installed! Module folder: ", moduleInstallation$moduleFolder)) - } - - # Create job context - moduleExecutionSettings <- executionSettings - moduleExecutionSettings$workSubFolder <- file.path(executionSettings$workFolder, sprintf("%s_%d", module, moduleIndex)) - moduleExecutionSettings$resultsSubFolder <- file.path(executionSettings$resultsFolder, sprintf("%s_%d", module, moduleIndex)) - - if (!dir.exists(moduleExecutionSettings$workSubFolder)) { - dir.create(moduleExecutionSettings$workSubFolder, recursive = TRUE) - } - if (!dir.exists(moduleExecutionSettings$resultsSubFolder)) { - dir.create(moduleExecutionSettings$resultsSubFolder, recursive = TRUE) - } - jobContext <- list( - sharedResources = analysisSpecifications$sharedResources, - settings = moduleSpecification$settings, - moduleExecutionSettings = moduleExecutionSettings, - keyringSettings = keyringSettings - ) - jobContextFileName <- .formatAndNormalizeFilePathForScript(file.path(moduleExecutionSettings$workSubFolder, "jobContext.rds")) - saveRDS(jobContext, jobContextFileName) - - tempScriptFile <- .formatAndNormalizeFilePathForScript(file.path(moduleExecutionSettings$workSubFolder, "StrategusScript.R")) - doneFile <- .formatAndNormalizeFilePathForScript(file.path(jobContext$moduleExecutionSettings$resultsSubFolder, "done")) - if (file.exists(doneFile)) { - unlink(doneFile) - } - - if 
(is(executionSettings, "CdmExecutionSettings")) { - isCdmExecution <- TRUE - } else if (is(executionSettings, "ResultsExecutionSettings")) { - isCdmExecution <- FALSE - } else { - stop("Unhandled executionSettings class! Must be one of the following: CdmExecutionSettings, ResultsExecutionSettings") - } - withModuleRenv( - code = { - ############################ - # Generated by strategus - do not modify by hand - ############################ - source("Main.R") - jobContext <- readRDS(jobContextFileName) - - keyringName <- jobContext$keyringSettings$keyringName - # unlockKeyring will be injected automatically - keyringLocked <- unlockKeyring(keyringName = keyringName) - - ParallelLogger::addDefaultFileLogger(jobContext$moduleExecutionSettings$logFileName) - ParallelLogger::addDefaultFileLogger(file.path(jobContext$moduleExecutionSettings$resultsSubFolder, "log.txt")) - ParallelLogger::addDefaultErrorReportLogger(file.path(jobContext$moduleExecutionSettings$resultsSubFolder, "errorReport.R")) - - options(andromedaTempFolder = file.path(jobContext$moduleExecutionSettings$workFolder, "andromedaTemp")) - options(sqlRenderTempEmulationSchema = jobContext$moduleExecutionSettings$tempEmulationSchema) - options(databaseConnectorIntegerAsNumeric = jobContext$moduleExecutionSettings$integerAsNumeric) - options(databaseConnectorInteger64AsNumeric = jobContext$moduleExecutionSettings$integer64AsNumeric) - - if (Sys.getenv("FORCE_RENV_USE", "") == "TRUE") { - renv::use(lockfile = "renv.lock") - } - - # NOTE: injected variable isResultsExecution - will look strange outside of Strategus definition - # NOTE: retrieveConnectionDetails function is injected by withModuleRenv - if (isCdmExecution) { - connectionDetails <- retrieveConnectionDetails( - connectionDetailsReference = jobContext$moduleExecutionSettings$connectionDetailsReference, - keyringName = keyringName - ) - jobContext$moduleExecutionSettings$connectionDetails <- connectionDetails - } else { - resultsConnectionDetails 
<- retrieveConnectionDetails( - connectionDetailsReference = jobContext$moduleExecutionSettings$resultsConnectionDetailsReference, - keyringName = keyringName - ) - jobContext$moduleExecutionSettings$resultsConnectionDetails <- resultsConnectionDetails - } - if (keyringLocked) { - keyring::keyring_lock(keyring = keyringName) - } - message("START MODULE RUN: ", moduleName) - execute(jobContext) - message("FINISH MODULE RUN: ", moduleName) - - ParallelLogger::unregisterLogger("DEFAULT_FILE_LOGGER", silent = TRUE) - ParallelLogger::unregisterLogger("DEFAULT_ERRORREPORT_LOGGER", silent = TRUE) - writeLines("done", doneFile) - }, - tempScriptFile = tempScriptFile, - moduleFolder = moduleFolder, - injectVars = list( - doneFile = doneFile, - isCdmExecution = isCdmExecution, - jobContextFileName = jobContextFileName, - moduleName = module - ) - ) - - if (!file.exists(doneFile)) { - message <- paste( - "Module did not complete. To debug:", - sprintf(" rstudioapi::openProject('%s', newSession = TRUE)", moduleFolder), - sprintf(" file.edit('%s')", tempScriptFile), - sep = "\n" - ) - stop(message) - } - - return(list(dummy = 123)) -} diff --git a/R/Settings.R b/R/Settings.R index 5503d098..cbc08e41 100644 --- a/R/Settings.R +++ b/R/Settings.R @@ -21,25 +21,9 @@ # carefully consider serialization and deserialization to JSON, which currently # uses custom functionality in ParallelLogger to maintain object attributes. -#' Create an empty analysis specifications object. -#' -#' @return -#' An object of type `AnalysisSpecifications`. -#' -#' @export -createEmptyAnalysisSpecificiations <- function() { - analysisSpecifications <- list( - sharedResources = list(), - moduleSpecifications = list() - ) - class(analysisSpecifications) <- "AnalysisSpecifications" - return(analysisSpecifications) -} - #' Add shared resources to analysis specifications #' -#' @param analysisSpecifications An object of type `AnalysisSpecifications` as created -#' by [createEmptyAnalysisSpecificiations()]. 
+#' @template analysisSpecifications #' @param sharedResources An object of type `SharedResources`. #' #' @return @@ -58,9 +42,8 @@ addSharedResources <- function(analysisSpecifications, sharedResources) { #' Add module specifications to analysis specifications #' -#' @param analysisSpecifications An object of type `AnalysisSpecifications` as created -#' by [createEmptyAnalysisSpecificiations()]. -#' @param moduleSpecifications An object of type `ModuleSpecifications`. +#' @template analysisSpecifications +#' @template moduleSpecifications #' #' @return #' Returns the `analysisSpecifications` object with the module specifications added. @@ -76,10 +59,187 @@ addModuleSpecifications <- function(analysisSpecifications, moduleSpecifications return(analysisSpecifications) } +#' Add Characterization module specifications to analysis specifications +#' +#' @template analysisSpecifications +#' @param moduleSpecifications An object of type `CharacterizationModule`. +#' +#' @return +#' Returns the `analysisSpecifications` object with the module specifications added. +#' +#' @export +addCharacterizationModuleSpecifications <- function(analysisSpecifications, moduleSpecifications) { + return( + addAndValidateModuleSpecifications( + moduleName = "CharacterizationModule", + analysisSpecifications = analysisSpecifications, + moduleSpecifications = moduleSpecifications + ) + ) +} + +#' Add Cohort Diagnostics module specifications to analysis specifications +#' +#' @template analysisSpecifications +#' @param moduleSpecifications An object of type `CohortDiagnosticsModule`. +#' +#' @return +#' Returns the `analysisSpecifications` object with the module specifications added. 
+#' +#' @export +addCohortDiagnosticsModuleSpecifications <- function(analysisSpecifications, moduleSpecifications) { + return( + addAndValidateModuleSpecifications( + moduleName = "CohortDiagnosticsModule", + analysisSpecifications = analysisSpecifications, + moduleSpecifications = moduleSpecifications + ) + ) +} + +#' Add Cohort Generator module specifications to analysis specifications +#' +#' @template analysisSpecifications +#' @param moduleSpecifications An object of type `CohortGeneratorModule`. +#' +#' @return +#' Returns the `analysisSpecifications` object with the module specifications added. +#' +#' @export +addCohortGeneratorModuleSpecifications <- function(analysisSpecifications, moduleSpecifications) { + return( + addAndValidateModuleSpecifications( + moduleName = "CohortGeneratorModule", + analysisSpecifications = analysisSpecifications, + moduleSpecifications = moduleSpecifications + ) + ) +} + +#' Add Cohort Incidence module specifications to analysis specifications +#' +#' @template analysisSpecifications +#' @param moduleSpecifications An object of type `CohortIncidenceModule`. +#' +#' @return +#' Returns the `analysisSpecifications` object with the module specifications added. +#' +#' @export +addCohortIncidenceModuleSpecifications <- function(analysisSpecifications, moduleSpecifications) { + return( + addAndValidateModuleSpecifications( + moduleName = "CohortIncidenceModule", + analysisSpecifications = analysisSpecifications, + moduleSpecifications = moduleSpecifications + ) + ) +} + +#' Add Cohort Method module specifications to analysis specifications +#' +#' @template analysisSpecifications +#' @param moduleSpecifications An object of type `CohortMethodModule`. +#' +#' @return +#' Returns the `analysisSpecifications` object with the module specifications added. 
+#' +#' @export +addCohortMethodeModuleSpecifications <- function(analysisSpecifications, moduleSpecifications) { + return( + addAndValidateModuleSpecifications( + moduleName = "CohortMethodModule", + analysisSpecifications = analysisSpecifications, + moduleSpecifications = moduleSpecifications + ) + ) +} + +#' Add Evidence Synthesis module specifications to analysis specifications +#' +#' @template analysisSpecifications +#' @param moduleSpecifications An object of type `EvidenceSynthesisModule`. +#' +#' @return +#' Returns the `analysisSpecifications` object with the module specifications added. +#' +#' @export +addEvidenceSynthesisModuleSpecifications <- function(analysisSpecifications, moduleSpecifications) { + return( + addAndValidateModuleSpecifications( + moduleName = "EvidenceSynthesisModule", + analysisSpecifications = analysisSpecifications, + moduleSpecifications = moduleSpecifications + ) + ) +} + +#' Add Patient Level Prediction module specifications to analysis specifications +#' +#' @template analysisSpecifications +#' @param moduleSpecifications An object of type `PatientLevelPredictionModule`. +#' +#' @return +#' Returns the `analysisSpecifications` object with the module specifications added. +#' +#' @export +addPatientLevelPredictionModuleSpecifications <- function(analysisSpecifications, moduleSpecifications) { + return( + addAndValidateModuleSpecifications( + moduleName = "PatientLevelPredictionModule", + analysisSpecifications = analysisSpecifications, + moduleSpecifications = moduleSpecifications + ) + ) +} + +#' Add Self Controlled Case Series module specifications to analysis specifications +#' +#' @template analysisSpecifications +#' @param moduleSpecifications An object of type `SelfControlledCaseSeriesModule`. +#' +#' @return +#' Returns the `analysisSpecifications` object with the module specifications added. 
+#' +#' @export +addSelfControlledCaseSeriesModuleSpecifications <- function(analysisSpecifications, moduleSpecifications) { + return( + addAndValidateModuleSpecifications( + moduleName = "SelfControlledCaseSeriesModule", + analysisSpecifications = analysisSpecifications, + moduleSpecifications = moduleSpecifications + ) + ) +} + +addAndValidateModuleSpecifications <- function(moduleName, analysisSpecifications, moduleSpecifications) { + moduleObj <- get(moduleName)$new() + moduleObj$validateModuleSpecifications(moduleSpecifications) + analysisSpecifications <- addModuleSpecifications( + analysisSpecifications = analysisSpecifications, + moduleSpecifications = moduleSpecifications + ) + return(analysisSpecifications) +} + + +#' Create an empty analysis specifications object. +#' +#' @return +#' An object of type `AnalysisSpecifications`. +#' +#' @export +createEmptyAnalysisSpecificiations <- function() { + analysisSpecifications <- list( + sharedResources = list(), + moduleSpecifications = list() + ) + class(analysisSpecifications) <- "AnalysisSpecifications" + return(analysisSpecifications) +} + + #' Create CDM execution settings #' -#' @param connectionDetailsReference A string that can be used to retrieve database connection details from a secure local -#' store. #' @param workDatabaseSchema A database schema where intermediate data can be stored. The user (as identified in the #' connection details) will need to have write access to this database schema. #' @param cdmDatabaseSchema The database schema containing the data in CDM format. The user (as identified in the @@ -89,35 +249,24 @@ addModuleSpecifications <- function(analysisSpecifications, moduleSpecifications #' [CohortGenerator::getCohortTableNames()] function. #' @param tempEmulationSchema Some database platforms like Oracle and Impala do not truly support temp tables. To emulate temp tables, provide a schema with write privileges where temp tables can be created. 
#' @param workFolder A folder in the local file system where intermediate results can be written. -#' @param resultsFolder A folder in the local file system where the module output will be written. +#' @template resultsFolder #' @param logFileName Logging information from Strategus and all modules will be located in this file. Individual modules will continue to have their own module-specific logs. By default this will be written to the root of the `resultsFolder` #' @param minCellCount The minimum number of subjects contributing to a count before it can be included #' in results. -#' @param integerAsNumeric Logical: should 32-bit integers be converted to numeric (double) values? If FALSE 32-bit integers will be represented using R's native `Integer` class. Default is TRUE -#' @param integer64AsNumeric Logical: should 64-bit integers be converted to numeric (double) values? If FALSE 64-bit integers will be represented using `bit64::integer64`. Default is TRUE -#' @param resultsConnectionDetailsReference A string that can be used to retrieve the results database connection -#' details from a secure local store. -#' @param resultsDatabaseSchema A schema where the results tables are stored #' #' @return #' An object of type `ExecutionSettings`. 
#' #' @export -createCdmExecutionSettings <- function(connectionDetailsReference, - workDatabaseSchema, +createCdmExecutionSettings <- function(workDatabaseSchema, cdmDatabaseSchema, cohortTableNames = CohortGenerator::getCohortTableNames(cohortTable = "cohort"), tempEmulationSchema = getOption("sqlRenderTempEmulationSchema"), workFolder, resultsFolder, logFileName = file.path(resultsFolder, "strategus-log.txt"), - minCellCount = 5, - integerAsNumeric = getOption("databaseConnectorIntegerAsNumeric", default = TRUE), - integer64AsNumeric = getOption("databaseConnectorInteger64AsNumeric", default = TRUE), - resultsConnectionDetailsReference = NULL, - resultsDatabaseSchema = NULL) { + minCellCount = 5) { errorMessages <- checkmate::makeAssertCollection() - checkmate::assertCharacter(connectionDetailsReference, len = 1, add = errorMessages) checkmate::assertCharacter(workDatabaseSchema, len = 1, add = errorMessages) checkmate::assertCharacter(cdmDatabaseSchema, len = 1, add = errorMessages) checkmate::assertList(cohortTableNames, add = errorMessages) @@ -125,10 +274,6 @@ createCdmExecutionSettings <- function(connectionDetailsReference, checkmate::assertCharacter(resultsFolder, len = 1, add = errorMessages) checkmate::assertCharacter(logFileName, len = 1, add = errorMessages) checkmate::assertInt(minCellCount, add = errorMessages) - checkmate::assertLogical(integerAsNumeric, max.len = 1, add = errorMessages) - checkmate::assertLogical(integer64AsNumeric, max.len = 1, add = errorMessages) - checkmate::assertCharacter(resultsConnectionDetailsReference, null.ok = TRUE, add = errorMessages) - checkmate::assertCharacter(resultsDatabaseSchema, null.ok = TRUE, add = errorMessages) checkmate::reportAssertions(collection = errorMessages) # Normalize paths to convert relative paths to absolute paths @@ -136,59 +281,38 @@ createCdmExecutionSettings <- function(connectionDetailsReference, resultsFolder <- normalizePath(resultsFolder, mustWork = F) logFileName <- 
normalizePath(logFileName, mustWork = F) - executionSettings <- list( - connectionDetailsReference = connectionDetailsReference, - workDatabaseSchema = workDatabaseSchema, - cdmDatabaseSchema = cdmDatabaseSchema, - cohortTableNames = cohortTableNames, - tempEmulationSchema = tempEmulationSchema, - workFolder = workFolder, - resultsFolder = resultsFolder, - logFileName = logFileName, - minCellCount = minCellCount, - integerAsNumeric = integerAsNumeric, - integer64AsNumeric = integer64AsNumeric, - resultsConnectionDetailsReference = resultsConnectionDetailsReference, - resultsDatabaseSchema = resultsDatabaseSchema - ) + executionSettings <- list() + for (name in names(formals(createCdmExecutionSettings))) { + executionSettings[[name]] <- get(name) + } class(executionSettings) <- c("CdmExecutionSettings", "ExecutionSettings") return(executionSettings) } #' Create Results execution settings #' -#' @param resultsConnectionDetailsReference A string that can be used to retrieve the results database connection -#' details from a secure local store. -#' @param resultsDatabaseSchema A schema where the results tables are stored +#' @template resultsDatabaseSchema #' @param workFolder A folder in the local file system where intermediate results can be written. -#' @param resultsFolder A folder in the local file system where the module output will be written. +#' @template resultsFolder #' @param logFileName Logging information from Strategus and all modules will be located in this file. Individual modules will continue to have their own module-specific logs. By default this will be written to the root of the `resultsFolder` #' @param minCellCount The minimum number of subjects contributing to a count before it can be included #' in results. -#' @param integerAsNumeric Logical: should 32-bit integers be converted to numeric (double) values? If FALSE 32-bit integers will be represented using R's native `Integer` class. 
Default is TRUE -#' @param integer64AsNumeric Logical: should 64-bit integers be converted to numeric (double) values? If FALSE 64-bit integers will be represented using `bit64::integer64`. Default is TRUE #' #' @return #' An object of type `ExecutionSettings`. #' #' @export -createResultsExecutionSettings <- function(resultsConnectionDetailsReference, - resultsDatabaseSchema, +createResultsExecutionSettings <- function(resultsDatabaseSchema, workFolder, resultsFolder, logFileName = file.path(resultsFolder, "strategus-log.txt"), - minCellCount = 5, - integerAsNumeric = getOption("databaseConnectorIntegerAsNumeric", default = TRUE), - integer64AsNumeric = getOption("databaseConnectorInteger64AsNumeric", default = TRUE)) { + minCellCount = 5) { errorMessages <- checkmate::makeAssertCollection() - checkmate::assertCharacter(resultsConnectionDetailsReference, len = 1, add = errorMessages) checkmate::assertCharacter(resultsDatabaseSchema, len = 1, add = errorMessages) checkmate::assertCharacter(workFolder, len = 1, add = errorMessages) checkmate::assertCharacter(resultsFolder, len = 1, add = errorMessages) checkmate::assertCharacter(logFileName, len = 1, add = errorMessages) checkmate::assertInt(minCellCount, add = errorMessages) - checkmate::assertLogical(integerAsNumeric, max.len = 1, add = errorMessages) - checkmate::assertLogical(integer64AsNumeric, max.len = 1, add = errorMessages) checkmate::reportAssertions(collection = errorMessages) # Normalize paths to convert relative paths to absolute paths @@ -196,193 +320,45 @@ createResultsExecutionSettings <- function(resultsConnectionDetailsReference, resultsFolder <- normalizePath(resultsFolder, mustWork = F) logFileName <- normalizePath(logFileName, mustWork = F) - executionSettings <- list( - resultsConnectionDetailsReference = resultsConnectionDetailsReference, - resultsDatabaseSchema = resultsDatabaseSchema, - workFolder = workFolder, - resultsFolder = resultsFolder, - logFileName = logFileName, - minCellCount = 
minCellCount, - integerAsNumeric = integerAsNumeric, - integer64AsNumeric = integer64AsNumeric - ) + executionSettings <- list() + for (name in names(formals(createResultsExecutionSettings))) { + executionSettings[[name]] <- get(name) + } class(executionSettings) <- c("ResultsExecutionSettings", "ExecutionSettings") return(executionSettings) } - - -# Note: assuming connectionDetails objects remain stable across the various module -# versions. - -#' Store connection details in a secure location -#' -#' @param connectionDetails An object of type `connectionDetails` as created by the -#' [DatabaseConnector::createConnectionDetails()] function. -#' @param connectionDetailsReference A string that can be used to retrieve the settings from -#' the secure store. -#' -#' @template keyringName +#' Create Results Data Model Settings #' -#' @seealso [retrieveConnectionDetails()] -#' -#' @return -#' Does not return anything. Is called for the side effect of having the connection details -#' stored. -#' -#' @export -storeConnectionDetails <- function(connectionDetails, connectionDetailsReference, keyringName = NULL) { - errorMessages <- checkmate::makeAssertCollection() - # Get the keyring list and verify that the keyring specified exists. 
- # In the case of the default NULL keyring, this will be represented as an empty - # string in the keyring list - keyringList <- keyring::keyring_list() - if (is(connectionDetails, "connectionDetails")) { - checkmate::assertClass(connectionDetails, "connectionDetails", add = errorMessages) - } else { - checkmate::assertClass(connectionDetails, "ConnectionDetails", add = errorMessages) - } - checkmate::assertCharacter(connectionDetailsReference, len = 1, add = errorMessages) - checkmate::assertChoice(x = keyringName, choices = keyringList$keyring, null.ok = TRUE, add = errorMessages) - checkmate::reportAssertions(collection = errorMessages) - - # Evaluate functions used to secure details to allow serialization: - for (i in 1:length(connectionDetails)) { - if (is.function(connectionDetails[[i]])) { - detail <- connectionDetails[[i]]() - if (is.null(detail)) { - connectionDetails[[i]] <- .nullList() # Fixes Issue #74 - } else { - connectionDetails[[i]] <- connectionDetails[[i]]() - } - } - } - connectionDetails <- ParallelLogger::convertSettingsToJson(connectionDetails) - # If the keyring is locked, unlock it, set the value and then re-lock it - keyringLocked <- unlockKeyring(keyringName = keyringName) - keyring::key_set_with_value(connectionDetailsReference, password = connectionDetails, keyring = keyringName) - if (keyringLocked) { - keyring::keyring_lock(keyring = keyringName) - } - invisible(NULL) -} - -#' Retrieve connection details from the secure location -#' -#' @param connectionDetailsReference A string that can be used to retrieve the settings from -#' the secure store. -#' -#' @template keyringName +#' @description +#' The results data model settings are used to create the results data +#' model and to upload results. #' -#' @seealso [storeConnectionDetails()] +#' @template resultsDatabaseSchema +#' @template resultsFolder +#' @param logFileName Log location for data model operations #' #' @return -#' Returns an object of type `connectionDetails`. 
+#' An object of type `ResultsDataModelSettings` #' #' @export -retrieveConnectionDetails <- function(connectionDetailsReference, keyringName = NULL) { - keyringList <- keyring::keyring_list() +createResultsDataModelSettings <- function(resultsDatabaseSchema, + resultsFolder, + logFileName = file.path(resultsFolder, "strategus-results-data-model-log.txt")) { errorMessages <- checkmate::makeAssertCollection() - checkmate::assertCharacter(connectionDetailsReference, len = 1, add = errorMessages) - checkmate::assertLogical(x = (is.null(keyringName) || keyringName %in% keyringList$keyring), add = errorMessages) + checkmate::assertCharacter(resultsDatabaseSchema, len = 1, add = errorMessages) + checkmate::assertCharacter(resultsFolder, len = 1, add = errorMessages) + checkmate::assertCharacter(logFileName, len = 1, add = errorMessages) checkmate::reportAssertions(collection = errorMessages) - if (!connectionDetailsReference %in% keyring::key_list(keyring = keyringName)$service) { - stop("Connection details with connectionDetailsReference = \"", connectionDetailsReference, "\" were not found in your keyring. Please check that you have used the Strategus storeConnectionDetails function to save your connection details with this connectionDetailsReference name.") - } - - # If the keyring is locked, unlock it, set the value and then re-lock it - keyringLocked <- unlockKeyring(keyringName = keyringName) - - connectionDetails <- keyring::key_get(connectionDetailsReference, keyring = keyringName) - connectionDetails <- ParallelLogger::convertJsonToSettings(connectionDetails) - - # Ensure that NA values are converted to NULL prior to calling - # DatabaseConnector. To do this, we'll construct a new connectionDetails - # list from keyring where the connectionDetails are NOT NA. This will - # allow for calling DatabaseConnector::createConnectionDetails with - # NULL values where NAs are present in the serialized version of the - # connectionDetails from keyring. 
- connectionDetailsConstructedFromKeyring <- list() - for (i in 1:length(connectionDetails)) { - if (isFALSE(is.na(connectionDetails[[i]]))) { - connectionDetailsConstructedFromKeyring[names(connectionDetails)[i]] <- connectionDetails[[i]] - } - } - - connectionDetails <- do.call(DatabaseConnector::createConnectionDetails, connectionDetailsConstructedFromKeyring) - - if (keyringLocked) { - keyring::keyring_lock(keyring = keyringName) - } - - return(connectionDetails) -} - -#' Provides a list of HADES modules to run through Strategus -#' -#' @description -#' This function provides a list of modules and their locations -#' that may be used with Strategus. -#' -#' @return -#' A data.frame() of modules that work with Strategus. This will contain: -#' module = The name of the module -#' version = The version of the module -#' remote_repo = The remote location of the module (i.e. github.com) -#' remote_username = The organization of the module (i.e. OHDSI) -#' module_type = 'cdm' or 'results'. 'cdm' refers to modules that are designed to work against -#' patient level data in the OMOP CDM format. 'results' refers to modules that are designed -#' to work against a results database containing output from a 'cdm' module. -#' -#' @export -getModuleList <- function() { - moduleList <- CohortGenerator::readCsv(file = system.file("csv/modules.csv", - package = "Strategus", - mustWork = TRUE - )) - return(moduleList) -} - -#' Helper function to unlock a keyring -#' -#' @description -#' This helper function is used to unlock a keyring by using the password -#' stored in Sys.getenv("STRATEGUS_KEYRING_PASSWORD"). It will alert -#' the user if the environment variable with the password is not set. 
-#' -#' @template keyringName -#' -#' @return -#' Returns TRUE if the keyring was unlocked using the password otherwise -#' it returns FALSE -#' -#' @export -unlockKeyring <- function(keyringName) { - # If the keyring is locked, unlock it, set the value and then re-lock it - keyringLocked <- keyring::keyring_is_locked(keyring = keyringName) - if (keyringLocked) { - x <- Sys.getenv("STRATEGUS_KEYRING_PASSWORD") - if (length(x) == 0 || x == "") { - stop(paste0("STRATEGUS_KEYRING_PASSWORD NOT FOUND. STRATEGUS_KEYRING_PASSWORD must be set using Sys.setenv(STRATEGUS_KEYRING_PASSWORD = \"\") to unlock the keyring: ", keyringName)) - } - keyring::keyring_unlock(keyring = keyringName, password = Sys.getenv("STRATEGUS_KEYRING_PASSWORD")) - } - return(keyringLocked) -} + # Normalize paths to convert relative paths to absolute paths + resultsFolder <- normalizePath(resultsFolder, mustWork = F) + logFileName <- normalizePath(logFileName, mustWork = F) -#' @keywords internal -.checkModuleFolderSetting <- function(x) { - if (length(x) == 0 || x == "") { - return(paste0("INSTANTIATED_MODULES_FOLDER environment variable has not been set. INSTANTIATED_MODULES_FOLDER must be set using Sys.setenv(INSTANTIATED_MODULES_FOLDER = \"/somepath\")")) - } else { - return(TRUE) + executionSettings <- list() + for (name in names(formals(createResultsDataModelSettings))) { + executionSettings[[name]] <- get(name) } -} - -#' Used when serializing connection details to retain NULL values -#' -#' @keywords internal -.nullList <- function() { - invisible(list(NULL)) + class(executionSettings) <- c("ResultsDataModelSettings") + return(executionSettings) } diff --git a/R/ShareResults.R b/R/ShareResults.R index d6f1da9a..fbdb4088 100644 --- a/R/ShareResults.R +++ b/R/ShareResults.R @@ -6,8 +6,7 @@ #' relative paths to the root of the `resultsFolder` #' which is generally found in `executionSettings$resultsFolder`. #' -#' @param resultsFolder The folder holding the study results. 
This is found in -#' `executionSettings$resultsFolder`. +#' @template resultsFolder #' #' @param zipFile The path to the zip file to be created. #' diff --git a/R/Strategus.R b/R/Strategus.R index c3315edc..3c94d018 100644 --- a/R/Strategus.R +++ b/R/Strategus.R @@ -21,7 +21,6 @@ #' @import dplyr #' @import CohortGenerator #' @import DatabaseConnector +#' @import R6 #' @importFrom methods is NULL - -assertModulesFolderSetting <- checkmate::makeAssertionFunction(.checkModuleFolderSetting) diff --git a/extras/ExampleWithResultsUpload.R b/extras/ExampleWithResultsUpload.R deleted file mode 100644 index 5e034be3..00000000 --- a/extras/ExampleWithResultsUpload.R +++ /dev/null @@ -1,96 +0,0 @@ -library(dplyr) -library(CohortGenerator) -library(Strategus) -library(ROhdsiWebApi) - -baseUrl <- "https://change.me:8443/WebAPI" - -atlasCohortIds <- c(5903, 5904) - -username <- "changeme" # Set to your atlas username -dbUsername <- username - -if (.Platform$OS.type == "unix") { - ROhdsiWebApi::authorizeWebApi(baseUrl = baseUrl, - webApiUsername = username, - webApiPassword = getPass::getPass(), - authMethod = "windows") -} else { - ROhdsiWebApi::authorizeWebApi(baseUrl = baseUrl, - authMethod = "windows") -} - -cohortDefinitionSet <- ROhdsiWebApi::exportCohortDefinitionSet(baseUrl = baseUrl, - cohortIds = atlasCohortIds, - generateStats = TRUE) - -dir.create("tmp", showWarnings = F) -source("https://raw.githubusercontent.com/OHDSI/CohortDiagnosticsModule/0.0.8/SettingsFunctions.R") - -cohortDiagnosticsModuleSpecifications <- createCohortDiagnosticsModuleSpecifications( - cohortIds = atlasCohortIds, - runInclusionStatistics = TRUE, - runIncludedSourceConcepts = TRUE, - runOrphanConcepts = TRUE, - runTimeSeries = FALSE, - runVisitContext = TRUE, - runBreakdownIndexEvents = TRUE, - runIncidenceRate = TRUE, - runCohortRelationship = TRUE, - runTemporalCohortCharacterization = TRUE, - incremental = TRUE -) - - -# Create analysis specifications 
--------------------------------------------- - -source("https://raw.githubusercontent.com/OHDSI/CohortGeneratorModule/main/SettingsFunctions.R") - -cohortDefinitionSharedResource <- createCohortSharedResourceSpecifications(cohortDefinitionSet) -cohortGeneratorModuleSpecifications <- createCohortGeneratorModuleSpecifications(incremental = TRUE, - generateStats = TRUE) - -analysisSpecifications <- Strategus::createEmptyAnalysisSpecificiations() %>% - addSharedResources(cohortDefinitionSharedResource) %>% - addModuleSpecifications(cohortGeneratorModuleSpecifications) %>% - Strategus::addModuleSpecifications(cohortDiagnosticsModuleSpecifications) - - -# Create execution settings ----------------------------------------------------- -connectionDetailsReference <- "eunomia-perm" -Strategus::storeConnectionDetails(Eunomia::getEunomiaConnectionDetails(file.path(normalizePath("tmp"), "eunomia-perm.sqlite")), connectionDetailsReference) - -resultsConnectionReference <- "result-store" -resultsConnectionDetails <- DatabaseConnector::createConnectionDetails(dbms = "sqlite", server = file.path(normalizePath("tmp"), "results.sqlite")) -Strategus::storeConnectionDetails(resultsConnectionDetails, resultsConnectionReference) - -# Note: this environmental variable should be set once for each compute node -Sys.setenv("INSTANTIATED_MODULES_FOLDER" = "~/tmp/StrategusInstantiatedModules") - -# This should be ran once and only once -resultsExecutitionSettings <- Strategus::createResultsExecutionSettings(resultsConnectionDetailsReference = "result-store", - resultsDatabaseSchema = "main", - workFolder = file.path(getwd(), "./tmp/strategusWork"), - resultsFolder = file.path(getwd(),"./tmp/strategusOutput")) - -# Create results schemas for all modules - this is only needed once -Strategus::createResultDataModels(analysisSpecifications = analysisSpecifications, executionSettings = resultsExecutitionSettings) - -# Note: Need to do only once: store con -executionSettings <- 
Strategus::createCdmExecutionSettings(connectionDetailsReference = connectionDetailsReference, - workDatabaseSchema = "main", - cdmDatabaseSchema = "main", - cohortTableNames = CohortGenerator::getCohortTableNames(cohortTable = "strategus_test"), - workFolder = file.path(getwd(), "./tmp/strategusWork"), - resultsFolder = file.path(getwd(),"./tmp/strategusOutput"), - minCellCount = 5, - resultsDatabaseSchema = "main", - resultsConnectionDetailsReference = resultsConnectionReference) - -ParallelLogger::saveSettingsToJson(executionSettings, "testExecutionSettings.json") - -# Execute analyses ------------------------------------------------------------- -unlink("_targets", recursive = TRUE) - -Strategus::execute(analysisSpecifications = analysisSpecifications, - executionSettings = executionSettings) diff --git a/extras/ExecuteStrategusOnEunomia.R b/extras/ExecuteStrategusOnEunomia.R deleted file mode 100644 index 4adc266a..00000000 --- a/extras/ExecuteStrategusOnEunomia.R +++ /dev/null @@ -1,81 +0,0 @@ -#Install packages required for this script --------- -#install.packages("remotes") -#install.packages("ParallelLogger") -#remotes::install_github("OHDSI/Strategus") -#remotes::install_github("OHDSI/Eunomia") -#remotes::install_github("OHDSI/CohortGenerator") - -#Run the Eunomia study --------- -# Set the folder & environment variable for module storage -moduleFolder <- Sys.getenv("INSTANTIATED_MODULES_FOLDER") -studyFolder <- "C:/temp/strategus/EunomiaTestStudy" - - -if (!dir.exists(moduleFolder)) { - dir.create(moduleFolder, recursive = TRUE) -} -if (!dir.exists(studyFolder)) { - dir.create(studyFolder, recursive = TRUE) -} - -# Create the execution settings for Eunomia ---------- -connectionDetails <- Eunomia::getEunomiaConnectionDetails( - databaseFile = file.path(studyFolder, "cdm.sqlite") -) - -Strategus::storeConnectionDetails(connectionDetails = connectionDetails, - connectionDetailsReference = "eunomia") - -# Set the working directory to studyFolder -# and use 
relative paths to test -setwd(studyFolder) - -# Execute the study --------- -analysisSpecifications <- ParallelLogger::loadSettingsFromJson( - fileName = system.file("testdata/analysisSpecification.json", - package = "Strategus") -) - -resultsExecutionSettings <- Strategus::createResultsExecutionSettings( - resultsConnectionDetailsReference = "eunomia", - resultsDatabaseSchema = "main", - workFolder = file.path("schema_creation", "work_folder"), - resultsFolder = file.path("schema_creation", "results_folder") -) - -executionSettings <- Strategus::createCdmExecutionSettings( - connectionDetailsReference = "eunomia", - workDatabaseSchema = "main", - cdmDatabaseSchema = "main", - cohortTableNames = CohortGenerator::getCohortTableNames(), - workFolder = "work_folder", - resultsFolder = "results_folder", - minCellCount = 5, - resultsConnectionDetailsReference = "eunomia", - resultsDatabaseSchema = "main" -) - -ParallelLogger::saveSettingsToJson( - object = executionSettings, - file.path(studyFolder, "eunomiaExecutionSettings.json") -) - -executionSettings <- ParallelLogger::loadSettingsFromJson( - fileName = file.path(studyFolder, "eunomiaExecutionSettings.json") -) - -Strategus::storeConnectionDetails( - connectionDetails, - "eunomia" -) - -Strategus::createResultDataModels( - analysisSpecifications = analysisSpecifications, - executionSettings = resultsExecutionSettings -) - -Strategus::execute( - analysisSpecifications = analysisSpecifications, - executionSettings = executionSettings, - executionScriptFolder = file.path(studyFolder, "script_folder") -) diff --git a/extras/PackageMaintenance.R b/extras/PackageMaintenance.R index 0e911d14..d14b1f3b 100644 --- a/extras/PackageMaintenance.R +++ b/extras/PackageMaintenance.R @@ -29,61 +29,6 @@ OhdsiRTools::updateCopyrightYearFolder() OhdsiRTools::findNonAsciiStringsInFolder() devtools::spell_check() -# Update the module version information based on updates found on GitHub -library(dplyr) -# 
httr::set_config(httr::config(ssl_verifypeer = FALSE)) -updateModuleVersionInfo <- function() { - modules <- CohortGenerator::readCsv(file = "inst/csv/modules.csv") - modules <- modules %>% - select(-c("mainPackage", "mainPackageTag")) - # Get latest module versions --------------------------------------------------- - getLatestModuleVersion <- function(remoteRepo, remoteUsername, module) { - urlTemplate <- "https://api.%s/repos/%s/%s/releases/latest" - req <- httr2::request(base_url = sprintf(urlTemplate, remoteRepo, remoteUsername, module)) |> - httr2::req_headers("Authorization" = paste0("Bearer ", Sys.getenv("GITHUB_PAT")), - "X-GitHub-Api-Version" = "2022-11-28") - response <- httr2::req_perform(req) - release <- jsonlite::fromJSON(httr2::resp_body_string(response)) - return(release$tag_name) - } - versions <- tibble::tibble( - module = modules$module, - moduleVersion = mapply(getLatestModuleVersion, modules$remoteRepo, modules$remoteUsername, modules$module), - mainPackage = "", - mainPackageTag = "" - ) - # Get referenced main package tag ---------------------------------------------- - for (i in 1:nrow(modules)) { - module <- versions$module[i] - if (module == "CohortIncidenceModule") { - urlTemplate <- "https://raw.githubusercontent.com/OHDSI/%s/master/renv.lock" - } else { - urlTemplate <- "https://raw.githubusercontent.com/OHDSI/%s/main/renv.lock" - } - lock <- jsonlite::fromJSON(sprintf(urlTemplate, module)) - mainPackage <- gsub("Module", "", module) - versions$mainPackage[i] <- mainPackage - for (j in seq_along(lock$Packages)) { - if (lock$Packages[[j]]$Package == mainPackage) { - if (is.null(lock$Packages[[j]]$RemoteRef) || tolower(lock$Packages[[j]]$RemoteRef) == "head") { - versions$mainPackageTag[i] <- paste0("v", lock$Packages[[j]]$Version) - } else { - versions$mainPackageTag[i] <- lock$Packages[[j]]$RemoteRef - } - break - } - } - } - moduleList <- versions %>% - dplyr::inner_join(modules, by = c('module' = 'module')) %>% - 
dplyr::mutate(version = moduleVersion) %>% - dplyr::select(c(names(modules), "mainPackage", "mainPackageTag")) - - CohortGenerator::writeCsv(x = moduleList, - file = "inst/csv/modules.csv") -} -updateModuleVersionInfo() - # Create manual and vignettes: unlink("extras/Strategus.pdf") shell("R CMD Rd2pdf ./ --output=extras/Strategus.pdf") @@ -120,33 +65,404 @@ unlink("inst/doc/IntroductionToStrategus.tex") pkgdown::build_site() OhdsiRTools::fixHadesLogo() -# Repackage the test module for unit testing -# NOTE: This is only necessary when the TestModule -# has been updated -testModuleRootFolder <- "extras/TestModule1-0.0.1" -targetModuleZipFile <- "TestModule1_0.0.1.zip" -testModuleFilesToRemove <- c( - file.path(testModuleRootFolder, ".RData"), - file.path(testModuleRootFolder, ".Rhistory") -) -testModuleDirToRemove <- c( - file.path(testModuleRootFolder, ".Rproj.user"), - file.path(testModuleRootFolder, "renv/library"), - file.path(testModuleRootFolder, "renv/profiles/dev/renv/library") -) -unlink(testModuleFilesToRemove) -unlink(testModuleDirToRemove, recursive = TRUE) - -oldwd <- getwd() -setwd("extras") -zip::zip( - zipfile = targetModuleZipFile, - files = list.files("TestModule1-0.0.1", all.files = TRUE, recursive = TRUE, include.dirs = TRUE, full.names = TRUE) -) -file.copy( - from = targetModuleZipFile, - to = file.path("../inst/testdata", targetModuleZipFile), - overwrite = TRUE -) -file.remove(targetModuleZipFile) -setwd(oldwd) + +# Produce a study analysis specification for testing ----------- +library(Strategus) +cohortDefinitionSet <- getCohortDefinitionSet( + settingsFileName = system.file("testdata/Cohorts.csv", package = "Strategus"), + jsonFolder = system.file("testdata/cohorts", package = "Strategus"), + sqlFolder = system.file("testdata/sql", package = "Strategus") +) +subsetOperations <- list( + createDemographicSubset( + name = "Demographic Criteria", + ageMin = 18, + ageMax = 64 + ) +) +subsetDef <- createCohortSubsetDefinition( + name = "test 
definition", + definitionId = 1, + subsetOperators = subsetOperations +) +cohortDefinitionSet <- cohortDefinitionSet |> + addCohortSubsetDefinition(subsetDef) + +ncoCohortSet <- readCsv(file = system.file("testdata/negative_controls_concept_set.csv", + package = "Strategus" +)) + +# Exposures-outcomes +negativeControlOutcomeIds <- ncoCohortSet$cohortId +outcomeOfInterestIds <- c(3) +exposureOfInterestIds <- c(1, 2) + +# Characterization ------------------------------- +cModuleSettingsCreator <- CharacterizationModule$new() +cModuleSpecifications <- cModuleSettingsCreator$createModuleSpecifications( + targetIds = c(1, 2), + outcomeIds = 3 +) + +# Cohort Diagnostics ----------------- +cdModuleSettingsCreator <- CohortDiagnosticsModule$new() +cdModuleSpecifications <- cdModuleSettingsCreator$createModuleSpecifications( + runInclusionStatistics = TRUE, + runIncludedSourceConcepts = TRUE, + runOrphanConcepts = TRUE, + runTimeSeries = FALSE, + runVisitContext = TRUE, + runBreakdownIndexEvents = TRUE, + runIncidenceRate = TRUE, + runCohortRelationship = TRUE, + runTemporalCohortCharacterization = TRUE, + incremental = FALSE +) + +# Cohort Generator ----------------- +cgModuleSettingsCreator <- CohortGeneratorModule$new() + +# Create the settings & validate them +cohortSharedResourcesSpecifications <- cgModuleSettingsCreator$createCohortSharedResourceSpecifications(cohortDefinitionSet) +cgModuleSettingsCreator$validateCohortSharedResourceSpecifications(cohortSharedResourcesSpecifications) + +ncoCohortSharedResourceSpecifications <- cgModuleSettingsCreator$createNegativeControlOutcomeCohortSharedResourceSpecifications(ncoCohortSet, "first", TRUE) +cgModuleSettingsCreator$validateNegativeControlOutcomeCohortSharedResourceSpecifications(ncoCohortSharedResourceSpecifications) + +cgModuleSpecifications <- cgModuleSettingsCreator$createModuleSpecifications() + +# Characterization ------------------------------- +cModuleSettingsCreator <- CharacterizationModule$new() 
+cModuleSpecifications <- cModuleSettingsCreator$createModuleSpecifications( + targetIds = c(1, 2), + outcomeIds = 3 +) + +# Cohort Incidence ----------------- +ciModuleSettingsCreator <- CohortIncidenceModule$new() +targets <- list( + CohortIncidence::createCohortRef(id = 1, name = "Celecoxib"), + CohortIncidence::createCohortRef(id = 2, name = "Diclofenac"), + CohortIncidence::createCohortRef(id = 4, name = "Celecoxib Age >= 30"), + CohortIncidence::createCohortRef(id = 5, name = "Diclofenac Age >= 30") +) +outcomes <- list(CohortIncidence::createOutcomeDef(id = 1, name = "GI bleed", cohortId = 3, cleanWindow = 9999)) + +tars <- list( + CohortIncidence::createTimeAtRiskDef(id = 1, startWith = "start", endWith = "end"), + CohortIncidence::createTimeAtRiskDef(id = 2, startWith = "start", endWith = "start", endOffset = 365) +) +analysis1 <- CohortIncidence::createIncidenceAnalysis( + targets = c(1, 2, 4, 5), + outcomes = c(1), + tars = c(1, 2) +) + +irDesign <- CohortIncidence::createIncidenceDesign( + targetDefs = targets, + outcomeDefs = outcomes, + tars = tars, + analysisList = list(analysis1), + strataSettings = CohortIncidence::createStrataSettings( + byYear = TRUE, + byGender = TRUE + ) +) + +ciModuleSpecifications <- ciModuleSettingsCreator$createModuleSpecifications( + irDesign = irDesign$toList() +) + +# Cohort Method ---------------------- +cmModuleSettingsCreator <- CohortMethodModule$new() +negativeControlOutcomes <- lapply( + X = ncoCohortSet$cohortId, + FUN = CohortMethod::createOutcome, + outcomeOfInterest = FALSE, + trueEffectSize = 1, + priorOutcomeLookback = 30 +) + +outcomesOfInterest <- lapply( + X = 3, + FUN = CohortMethod::createOutcome, + outcomeOfInterest = TRUE +) + +outcomes <- append( + negativeControlOutcomes, + outcomesOfInterest +) + +tcos1 <- CohortMethod::createTargetComparatorOutcomes( + targetId = 1, + comparatorId = 2, + outcomes = outcomes, + excludedCovariateConceptIds = c(1118084, 1124300) +) +tcos2 <- 
CohortMethod::createTargetComparatorOutcomes( + targetId = 4, + comparatorId = 5, + outcomes = outcomes, + excludedCovariateConceptIds = c(1118084, 1124300) +) + +targetComparatorOutcomesList <- list(tcos1, tcos2) + +covarSettings <- FeatureExtraction::createDefaultCovariateSettings(addDescendantsToExclude = TRUE) + +getDbCmDataArgs <- CohortMethod::createGetDbCohortMethodDataArgs( + washoutPeriod = 183, + firstExposureOnly = TRUE, + removeDuplicateSubjects = "remove all", + maxCohortSize = 100000, + covariateSettings = covarSettings +) + +createStudyPopArgs <- CohortMethod::createCreateStudyPopulationArgs( + minDaysAtRisk = 1, + riskWindowStart = 0, + startAnchor = "cohort start", + riskWindowEnd = 30, + endAnchor = "cohort end" +) + +matchOnPsArgs <- CohortMethod::createMatchOnPsArgs() +fitOutcomeModelArgs <- CohortMethod::createFitOutcomeModelArgs(modelType = "cox") +createPsArgs <- CohortMethod::createCreatePsArgs( + stopOnError = FALSE, + control = Cyclops::createControl(cvRepetitions = 1) +) +computeSharedCovBalArgs <- CohortMethod::createComputeCovariateBalanceArgs() +computeCovBalArgs <- CohortMethod::createComputeCovariateBalanceArgs( + covariateFilter = FeatureExtraction::getDefaultTable1Specifications() +) + +cmAnalysis1 <- CohortMethod::createCmAnalysis( + analysisId = 1, + description = "No matching, simple outcome model", + getDbCohortMethodDataArgs = getDbCmDataArgs, + createStudyPopArgs = createStudyPopArgs, + fitOutcomeModelArgs = fitOutcomeModelArgs +) + +cmAnalysis2 <- CohortMethod::createCmAnalysis( + analysisId = 2, + description = "Matching on ps and covariates, simple outcomeModel", + getDbCohortMethodDataArgs = getDbCmDataArgs, + createStudyPopArgs = createStudyPopArgs, + createPsArgs = createPsArgs, + matchOnPsArgs = matchOnPsArgs, + computeSharedCovariateBalanceArgs = computeSharedCovBalArgs, + computeCovariateBalanceArgs = computeCovBalArgs, + fitOutcomeModelArgs = fitOutcomeModelArgs +) + +cmAnalysisList <- list(cmAnalysis1, cmAnalysis2) 
+ +analysesToExclude <- NULL + +cmModuleSpecifications <- cmModuleSettingsCreator$createModuleSpecifications( + cmAnalysisList = cmAnalysisList, + targetComparatorOutcomesList = targetComparatorOutcomesList, + analysesToExclude = analysesToExclude +) + +# EvidenceSythesis ------------------ +esModuleSettingsCreator = EvidenceSynthesisModule$new() +evidenceSynthesisSourceCm <- esModuleSettingsCreator$createEvidenceSynthesisSource( + sourceMethod = "CohortMethod", + likelihoodApproximation = "adaptive grid" +) +metaAnalysisCm <- esModuleSettingsCreator$createBayesianMetaAnalysis( + evidenceSynthesisAnalysisId = 1, + alpha = 0.05, + evidenceSynthesisDescription = "Bayesian random-effects alpha 0.05 - adaptive grid", + evidenceSynthesisSource = evidenceSynthesisSourceCm +) +evidenceSynthesisSourceSccs <- esModuleSettingsCreator$createEvidenceSynthesisSource( + sourceMethod = "SelfControlledCaseSeries", + likelihoodApproximation = "adaptive grid" +) +metaAnalysisSccs <- esModuleSettingsCreator$createBayesianMetaAnalysis( + evidenceSynthesisAnalysisId = 2, + alpha = 0.05, + evidenceSynthesisDescription = "Bayesian random-effects alpha 0.05 - adaptive grid", + evidenceSynthesisSource = evidenceSynthesisSourceSccs +) +evidenceSynthesisAnalysisList <- list(metaAnalysisCm, metaAnalysisSccs) +evidenceSynthesisAnalysisSpecifications <- esModuleSettingsCreator$createModuleSpecifications( + evidenceSynthesisAnalysisList +) +# PatientLevelPrediction ------------------------------- +plpModuleSettingsCreator <- PatientLevelPredictionModule$new() +makeModelDesignSettings <- function(targetId, outcomeId, popSettings, covarSettings) { + invisible(PatientLevelPrediction::createModelDesign( + targetId = targetId, + outcomeId = outcomeId, + restrictPlpDataSettings = PatientLevelPrediction::createRestrictPlpDataSettings(), + populationSettings = popSettings, + covariateSettings = covarSettings, + preprocessSettings = PatientLevelPrediction::createPreprocessSettings(), + modelSettings = 
PatientLevelPrediction::setLassoLogisticRegression(), + splitSettings = PatientLevelPrediction::createDefaultSplitSetting(), + runCovariateSummary = T + )) +} + +plpPopulationSettings <- PatientLevelPrediction::createStudyPopulationSettings( + startAnchor = "cohort start", + riskWindowStart = 1, + endAnchor = "cohort start", + riskWindowEnd = 365, + minTimeAtRisk = 1 +) +plpCovarSettings <- FeatureExtraction::createDefaultCovariateSettings() + +modelDesignList <- list() +for (i in 1:length(exposureOfInterestIds)) { + for (j in 1:length(outcomeOfInterestIds)) { + modelDesignList <- append( + modelDesignList, + list( + makeModelDesignSettings( + targetId = exposureOfInterestIds[i], + outcomeId = outcomeOfInterestIds[j], + popSettings = plpPopulationSettings, + covarSettings = plpCovarSettings + ) + ) + ) + } +} + +plpModuleSpecifications <- plpModuleSettingsCreator$createModuleSpecifications( + modelDesignList = modelDesignList +) + +# SelfControlledCaseSeries ------------------------------- +sccsModuleSettingsCreator <- SelfControlledCaseSeriesModule$new() + +exposuresOutcomeList <- list() +for (exposureOfInterestId in exposureOfInterestIds) { + for (outcomeOfInterestId in outcomeOfInterestIds) { + exposuresOutcomeList[[length(exposuresOutcomeList) + 1]] <- SelfControlledCaseSeries::createExposuresOutcome( + outcomeId = outcomeOfInterestId, + exposures = list(SelfControlledCaseSeries::createExposure(exposureId = exposureOfInterestId)) + ) + } + for (negativeControlOutcomeId in negativeControlOutcomeIds) { + exposuresOutcomeList[[length(exposuresOutcomeList) + 1]] <- SelfControlledCaseSeries::createExposuresOutcome( + outcomeId = negativeControlOutcomeId, + exposures = list(SelfControlledCaseSeries::createExposure(exposureId = exposureOfInterestId, trueEffectSize = 1)) + ) + } +} + +getDbSccsDataArgs <- SelfControlledCaseSeries::createGetDbSccsDataArgs( + studyStartDate = "", + studyEndDate = "", + maxCasesPerOutcome = 1e6, + useNestingCohort = TRUE, + 
nestingCohortId = 1, + deleteCovariatesSmallCount = 0 +) + +createStudyPopulation6AndOlderArgs <- SelfControlledCaseSeries::createCreateStudyPopulationArgs( + minAge = 18, + naivePeriod = 365 +) + +covarPreExp <- SelfControlledCaseSeries::createEraCovariateSettings( + label = "Pre-exposure", + includeEraIds = "exposureId", + start = -30, + end = -1, + endAnchor = "era start" +) + +covarExposureOfInt <- SelfControlledCaseSeries::createEraCovariateSettings( + label = "Main", + includeEraIds = "exposureId", + start = 0, + startAnchor = "era start", + end = 0, + endAnchor = "era end", + profileLikelihood = TRUE, + exposureOfInterest = TRUE +) + +calendarTimeSettings <- SelfControlledCaseSeries::createCalendarTimeCovariateSettings( + calendarTimeKnots = 5, + allowRegularization = TRUE, + computeConfidenceIntervals = FALSE +) + +seasonalitySettings <- SelfControlledCaseSeries::createSeasonalityCovariateSettings( + seasonKnots = 5, + allowRegularization = TRUE, + computeConfidenceIntervals = FALSE +) + +createSccsIntervalDataArgs <- SelfControlledCaseSeries::createCreateSccsIntervalDataArgs( + eraCovariateSettings = list(covarPreExp, covarExposureOfInt), + seasonalityCovariateSettings = seasonalitySettings, + calendarTimeCovariateSettings = calendarTimeSettings, + minCasesForTimeCovariates = 100000 +) + +fitSccsModelArgs <- SelfControlledCaseSeries::createFitSccsModelArgs( + control = Cyclops::createControl( + cvType = "auto", + selectorType = "byPid", + startingVariance = 0.1, + seed = 1, + resetCoefficients = TRUE, + noiseLevel = "quiet" + ) +) + +sccsAnalysis1 <- SelfControlledCaseSeries::createSccsAnalysis( + analysisId = 1, + description = "SCCS age 18-", + getDbSccsDataArgs = getDbSccsDataArgs, + createStudyPopulationArgs = createStudyPopulation6AndOlderArgs, + createIntervalDataArgs = createSccsIntervalDataArgs, + fitSccsModelArgs = fitSccsModelArgs +) + +sccsAnalysisList <- list(sccsAnalysis1) + +sccsModuleSpecifications <- 
sccsModuleSettingsCreator$createModuleSpecifications( + sccsAnalysisList = sccsAnalysisList, + exposuresOutcomeList = exposuresOutcomeList, + combineDataFetchAcrossOutcomes = FALSE +) + + +# Create analysis specifications CDM modules --------------- +cdmModulesAnalysisSpecifications <- createEmptyAnalysisSpecificiations() |> + addSharedResources(cohortSharedResourcesSpecifications) |> + addSharedResources(ncoCohortSharedResourceSpecifications) |> + addCharacterizationModuleSpecifications(cModuleSpecifications) |> + addCohortDiagnosticsModuleSpecifications(cdModuleSpecifications) |> + addCohortGeneratorModuleSpecifications(cgModuleSpecifications) |> + #addCohortIncidenceModuleSpecifications(ciModuleSpecifications) |> + addCohortMethodeModuleSpecifications(cmModuleSpecifications) |> + #addEvidenceSynthesisModuleSpecifications(evidenceSynthesisAnalysisSpecifications) |> + addSelfControlledCaseSeriesModuleSpecifications(sccsModuleSpecifications) |> + addPatientLevelPredictionModuleSpecifications(plpModuleSpecifications) + +ParallelLogger::saveSettingsToJson( + object = cdmModulesAnalysisSpecifications, + fileName = "inst/testdata/cdmModulesAnalysisSpecifications.json" +) + +# Create analysis specifications results modules --------------- +cdmModulesAnalysisSpecifications <- createEmptyAnalysisSpecificiations() |> + addEvidenceSynthesisModuleSpecifications(evidenceSynthesisAnalysisSpecifications) + +ParallelLogger::saveSettingsToJson( + object = cdmModulesAnalysisSpecifications, + fileName = "inst/testdata/resultsModulesAnalysisSpecifications.json" +) diff --git a/extras/R6ClassFun.R b/extras/R6ClassFun.R new file mode 100644 index 00000000..fe300657 --- /dev/null +++ b/extras/R6ClassFun.R @@ -0,0 +1,537 @@ +library(Strategus) +library(CohortGenerator) + +# Setup some test data ------------ +cohortDefinitionSet <- getCohortDefinitionSet( + settingsFileName = "testdata/Cohorts.csv", + jsonFolder = "testdata/cohorts", + sqlFolder = "testdata/sql", + packageName = 
"Strategus" +) +subsetOperations <- list( + createDemographicSubset( + name = "Demographic Criteria", + ageMin = 18, + ageMax = 64 + ) +) +subsetDef <- createCohortSubsetDefinition( + name = "test definition", + definitionId = 1, + subsetOperators = subsetOperations +) +cohortDefinitionSet <- cohortDefinitionSet |> + addCohortSubsetDefinition(subsetDef) + +ncoCohortSet <- readCsv(file = system.file("testdata/negative_controls_concept_set.csv", + package = "Strategus" +)) + +# Create the analysis settings --------------- + +# Cohort Generator ----------------- +cgModuleSettingsCreator <- CohortGeneratorModule$new() + +# Create the settings & validate them +cohortSharedResourcesSpecifications <- cgModuleSettingsCreator$createCohortSharedResourceSpecifications(cohortDefinitionSet) +cgModuleSettingsCreator$validateCohortSharedResourceSpecifications(cohortSharedResourcesSpecifications) + +ncoCohortSharedResourceSpecifications <- cgModuleSettingsCreator$createNegativeControlOutcomeCohortSharedResourceSpecifications(ncoCohortSet, "first", TRUE) +cgModuleSettingsCreator$validateNegativeControlOutcomeCohortSharedResourceSpecifications(ncoCohortSharedResourceSpecifications) + +cgModuleSettings <- cgModuleSettingsCreator$createModuleSpecifications() +cgModuleSettingsCreator$validateModuleSpecifications(cgModuleSettings) + +# Characterization ------------------------------- +cModuleSettingsCreator <- CharacterizationModule$new() +cModuleSpecifications <- cModuleSettingsCreator$createModuleSpecifications( + targetIds = c(1, 2), + outcomeIds = 3 +) + +# Cohort Diagnostics ----------------- +cdModuleSettingsCreator <- CohortDiagnosticsModule$new() +cdModuleSpecifications <- cdModuleSettingsCreator$createModuleSpecifications( + runInclusionStatistics = TRUE, + runIncludedSourceConcepts = TRUE, + runOrphanConcepts = TRUE, + runTimeSeries = FALSE, + runVisitContext = TRUE, + runBreakdownIndexEvents = TRUE, + runIncidenceRate = TRUE, + runCohortRelationship = TRUE, + 
runTemporalCohortCharacterization = TRUE, + incremental = FALSE +) + +# Cohort Incidence ----------------- +library(CohortIncidence) +ciModuleSettingsCreator <- CohortIncidenceModule$new() +targets <- list( + createCohortRef(id = 1, name = "Celecoxib"), + createCohortRef(id = 2, name = "Diclofenac"), + createCohortRef(id = 4, name = "Celecoxib Age >= 30"), + createCohortRef(id = 5, name = "Diclofenac Age >= 30") +) +outcomes <- list(createOutcomeDef(id = 1, name = "GI bleed", cohortId = 3, cleanWindow = 9999)) + +tars <- list( + createTimeAtRiskDef(id = 1, startWith = "start", endWith = "end"), + createTimeAtRiskDef(id = 2, startWith = "start", endWith = "start", endOffset = 365) +) +analysis1 <- createIncidenceAnalysis( + targets = c(1, 2, 4, 5), + outcomes = c(1), + tars = c(1, 2) +) + +irDesign <- createIncidenceDesign( + targetDefs = targets, + outcomeDefs = outcomes, + tars = tars, + analysisList = list(analysis1), + strataSettings = createStrataSettings( + byYear = TRUE, + byGender = TRUE + ) +) + +ciModuleSettings <- ciModuleSettingsCreator$createModuleSpecifications( + irDesign = irDesign$toList() +) +ciModuleSettingsCreator$validateModuleSpecifications(ciModuleSettings) + +# Cohort Method ---------------------- +library(CohortMethod) +cmModuleSettingsCreator <- CohortMethodModule$new() +negativeControlOutcomes <- lapply( + X = ncoCohortSet$cohortId, + FUN = createOutcome, + outcomeOfInterest = FALSE, + trueEffectSize = 1, + priorOutcomeLookback = 30 +) + +outcomesOfInterest <- lapply( + X = 3, + FUN = createOutcome, + outcomeOfInterest = TRUE +) + +outcomes <- append( + negativeControlOutcomes, + outcomesOfInterest +) + +tcos1 <- CohortMethod::createTargetComparatorOutcomes( + targetId = 1, + comparatorId = 2, + outcomes = outcomes, + excludedCovariateConceptIds = c(1118084, 1124300) +) +tcos2 <- CohortMethod::createTargetComparatorOutcomes( + targetId = 4, + comparatorId = 5, + outcomes = outcomes, + excludedCovariateConceptIds = c(1118084, 1124300) +) + 
+targetComparatorOutcomesList <- list(tcos1, tcos2) + +covarSettings <- FeatureExtraction::createDefaultCovariateSettings(addDescendantsToExclude = TRUE) + +getDbCmDataArgs <- CohortMethod::createGetDbCohortMethodDataArgs( + washoutPeriod = 183, + firstExposureOnly = TRUE, + removeDuplicateSubjects = "remove all", + maxCohortSize = 100000, + covariateSettings = covarSettings +) + +createStudyPopArgs <- CohortMethod::createCreateStudyPopulationArgs( + minDaysAtRisk = 1, + riskWindowStart = 0, + startAnchor = "cohort start", + riskWindowEnd = 30, + endAnchor = "cohort end" +) + +matchOnPsArgs <- CohortMethod::createMatchOnPsArgs() +fitOutcomeModelArgs <- CohortMethod::createFitOutcomeModelArgs(modelType = "cox") +createPsArgs <- CohortMethod::createCreatePsArgs( + stopOnError = FALSE, + control = Cyclops::createControl(cvRepetitions = 1) +) +computeSharedCovBalArgs <- CohortMethod::createComputeCovariateBalanceArgs() +computeCovBalArgs <- CohortMethod::createComputeCovariateBalanceArgs( + covariateFilter = FeatureExtraction::getDefaultTable1Specifications() +) + +cmAnalysis1 <- CohortMethod::createCmAnalysis( + analysisId = 1, + description = "No matching, simple outcome model", + getDbCohortMethodDataArgs = getDbCmDataArgs, + createStudyPopArgs = createStudyPopArgs, + fitOutcomeModelArgs = fitOutcomeModelArgs +) + +cmAnalysis2 <- CohortMethod::createCmAnalysis( + analysisId = 2, + description = "Matching on ps and covariates, simple outcomeModel", + getDbCohortMethodDataArgs = getDbCmDataArgs, + createStudyPopArgs = createStudyPopArgs, + createPsArgs = createPsArgs, + matchOnPsArgs = matchOnPsArgs, + computeSharedCovariateBalanceArgs = computeSharedCovBalArgs, + computeCovariateBalanceArgs = computeCovBalArgs, + fitOutcomeModelArgs = fitOutcomeModelArgs +) + +cmAnalysisList <- list(cmAnalysis1, cmAnalysis2) + +analysesToExclude <- NULL + +cmModuleSpecifications <- cmModuleSettingsCreator$createModuleSpecifications( + cmAnalysisList = cmAnalysisList, + 
targetComparatorOutcomesList = targetComparatorOutcomesList, + analysesToExclude = analysesToExclude +) + +# SelfControlledCaseSeries ------------------------------- +library(SelfControlledCaseSeries) +sccsModuleSettingsCreator <- SelfControlledCaseSeriesModule$new() + +# Exposures-outcomes +negativeControlOutcomeIds <- ncoCohortSet$cohortId +outcomeOfInterestIds <- c(3) +exposureOfInterestIds <- c(1, 2) + +exposuresOutcomeList <- list() +for (exposureOfInterestId in exposureOfInterestIds) { + for (outcomeOfInterestId in outcomeOfInterestIds) { + exposuresOutcomeList[[length(exposuresOutcomeList) + 1]] <- createExposuresOutcome( + outcomeId = outcomeOfInterestId, + exposures = list(createExposure(exposureId = exposureOfInterestId)) + ) + } + for (negativeControlOutcomeId in negativeControlOutcomeIds) { + exposuresOutcomeList[[length(exposuresOutcomeList) + 1]] <- createExposuresOutcome( + outcomeId = negativeControlOutcomeId, + exposures = list(createExposure(exposureId = exposureOfInterestId, trueEffectSize = 1)) + ) + } +} + +getDbSccsDataArgs <- SelfControlledCaseSeries::createGetDbSccsDataArgs( + studyStartDate = "", + studyEndDate = "", + maxCasesPerOutcome = 1e6, + useNestingCohort = TRUE, + nestingCohortId = 1, + deleteCovariatesSmallCount = 0 +) + +createStudyPopulation6AndOlderArgs <- SelfControlledCaseSeries::createCreateStudyPopulationArgs( + minAge = 18, + naivePeriod = 365 +) + +covarPreExp <- SelfControlledCaseSeries::createEraCovariateSettings( + label = "Pre-exposure", + includeEraIds = "exposureId", + start = -30, + end = -1, + endAnchor = "era start" +) + +covarExposureOfInt <- SelfControlledCaseSeries::createEraCovariateSettings( + label = "Main", + includeEraIds = "exposureId", + start = 0, + startAnchor = "era start", + end = 0, + endAnchor = "era end", + profileLikelihood = TRUE, + exposureOfInterest = TRUE +) + +calendarTimeSettings <- SelfControlledCaseSeries::createCalendarTimeCovariateSettings( + calendarTimeKnots = 5, + 
allowRegularization = TRUE, + computeConfidenceIntervals = FALSE +) + +seasonalitySettings <- SelfControlledCaseSeries::createSeasonalityCovariateSettings( + seasonKnots = 5, + allowRegularization = TRUE, + computeConfidenceIntervals = FALSE +) + +createSccsIntervalDataArgs <- SelfControlledCaseSeries::createCreateSccsIntervalDataArgs( + eraCovariateSettings = list(covarPreExp, covarExposureOfInt), + seasonalityCovariateSettings = seasonalitySettings, + calendarTimeCovariateSettings = calendarTimeSettings, + minCasesForTimeCovariates = 100000 +) + +fitSccsModelArgs <- SelfControlledCaseSeries::createFitSccsModelArgs( + control = Cyclops::createControl( + cvType = "auto", + selectorType = "byPid", + startingVariance = 0.1, + seed = 1, + resetCoefficients = TRUE, + noiseLevel = "quiet" + ) +) + +sccsAnalysis1 <- SelfControlledCaseSeries::createSccsAnalysis( + analysisId = 1, + description = "SCCS age 18-", + getDbSccsDataArgs = getDbSccsDataArgs, + createStudyPopulationArgs = createStudyPopulation6AndOlderArgs, + createIntervalDataArgs = createSccsIntervalDataArgs, + fitSccsModelArgs = fitSccsModelArgs +) + +sccsAnalysisList <- list(sccsAnalysis1) + +sccsModuleSpecifications <- sccsModuleSettingsCreator$createModuleSpecifications( + sccsAnalysisList = sccsAnalysisList, + exposuresOutcomeList = exposuresOutcomeList, + combineDataFetchAcrossOutcomes = FALSE +) + +# PatientLevelPrediction ------------------------------- +plpModuleSettingsCreator <- PatientLevelPredictionModule$new() +makeModelDesignSettings <- function(targetId, outcomeId, popSettings, covarSettings) { + invisible(PatientLevelPrediction::createModelDesign( + targetId = targetId, + outcomeId = outcomeId, + restrictPlpDataSettings = PatientLevelPrediction::createRestrictPlpDataSettings(), + populationSettings = popSettings, + covariateSettings = covarSettings, + preprocessSettings = PatientLevelPrediction::createPreprocessSettings(), + modelSettings = PatientLevelPrediction::setLassoLogisticRegression(), 
+ splitSettings = PatientLevelPrediction::createDefaultSplitSetting(), + runCovariateSummary = T + )) +} + +plpPopulationSettings <- PatientLevelPrediction::createStudyPopulationSettings( + startAnchor = "cohort start", + riskWindowStart = 1, + endAnchor = "cohort start", + riskWindowEnd = 365, + minTimeAtRisk = 1 +) +plpCovarSettings <- FeatureExtraction::createDefaultCovariateSettings() + +modelDesignList <- list() +for (i in 1:length(exposureOfInterestIds)) { + for (j in 1:length(outcomeOfInterestIds)) { + modelDesignList <- append( + modelDesignList, + list( + makeModelDesignSettings( + targetId = exposureOfInterestIds[i], + outcomeId = outcomeOfInterestIds[j], + popSettings = plpPopulationSettings, + covarSettings = plpCovarSettings + ) + ) + ) + } +} + +plpModuleSpecifications <- plpModuleSettingsCreator$createModuleSpecifications( + modelDesignList = modelDesignList +) + +# Create analysis specifications --------------- +analysisSpecifications <- createEmptyAnalysisSpecificiations() |> + addSharedResources(cohortSharedResourcesSpecifications) |> + addSharedResources(ncoCohortSharedResourceSpecifications) |> + addCohortGeneratorModuleSpecifications(cgModuleSettings) |> + addCohortDiagnosticsModuleSpecifications(cdModuleSpecifications) |> + addCharacterizationModuleSpecifications(cModuleSpecifications) |> + addCohortIncidenceModuleSpecifications(ciModuleSettings) |> + addCohortMethodeModuleSpecifications(cmModuleSpecifications) |> + addSelfControlledCaseSeriesModuleSpecifications(sccsModuleSpecifications) |> + addPatientLevelPredictionModuleSpecifications(plpModuleSpecifications) + +# Cleanup any prior results ----------------- +outputFolder <- "D:/TEMP/StrategusR6Testing" +unlink(outputFolder, recursive = T, force = T) +dir.create(outputFolder, recursive = T) + +# Execute ------------------- +ParallelLogger::saveSettingsToJson(analysisSpecifications, file.path(outputFolder, "analysisSettings.json")) +workFolder <- file.path(outputFolder, "work_folder")
+resultsFolder <- file.path(outputFolder, "results_folder") + +executionSettings <- Strategus::createCdmExecutionSettings( + workDatabaseSchema = "main", + cdmDatabaseSchema = "main", + cohortTableNames = CohortGenerator::getCohortTableNames(cohortTable = "strategus_test"), + workFolder = workFolder, + resultsFolder = resultsFolder, + minCellCount = 5 +) + +connectionDetails <- Eunomia::getEunomiaConnectionDetails( + databaseFile = file.path(outputFolder, "Eunomia.sqlite"), + overwrite = TRUE +) +#debugonce(Strategus::execute) +Strategus::execute( + analysisSpecifications = analysisSpecifications, + executionSettings = executionSettings, + connectionDetails = connectionDetails +) + +# # # DEBUG CD +# cdModule <- CohortDiagnosticsModule$new() +# debugonce(cdModule$execute) +# executionSettings$databaseId = "Eunomia" +# cdModule$execute( +# analysisSpecifications = analysisSpecifications, +# executionSettings = executionSettings, +# connectionDetails = connectionDetails +# ) + +# Create empty results database ------------------------- +library(RSQLite) +if (file.exists(file.path(outputFolder, "results.sqlite"))) { + unlink(file.path(outputFolder, "results.sqlite")) +} +mydb <- dbConnect(RSQLite::SQLite(), file.path(outputFolder, "results.sqlite")) +dbDisconnect(mydb) + +resultsConnectionDetails <- DatabaseConnector::createConnectionDetails( + dbms = "sqlite", + server = file.path(outputFolder, "results.sqlite") +) + +# Create results data model ------------------------- +resultsDataModelSettings <- Strategus::createResultsDataModelSettings( + resultsDatabaseSchema = "main", + resultsFolder = executionSettings$resultsFolder, +) + +# NOTE: CI has not implemented this so it will error out. 
+Strategus::createResultDataModel( + analysisSpecifications = analysisSpecifications, + resultsDataModelSettings = resultsDataModelSettings, + resultsConnectionDetails = resultsConnectionDetails +) + +# Upload results --------------- +debugonce(Strategus::uploadResults) +Strategus::uploadResults( + analysisSpecifications = analysisSpecifications, + resultsDataModelSettings = resultsDataModelSettings, + resultsConnectionDetails = resultsConnectionDetails +) + +# Peek in the results database --------------- +# conn <- DatabaseConnector::connect(resultsConnectionDetails) +# DatabaseConnector::disconnect(conn) + +# Run EvidenceSythesis Module ------------------ +esModuleSettingsCreator = EvidenceSynthesisModule$new() +evidenceSynthesisSourceCm <- esModuleSettingsCreator$createEvidenceSynthesisSource( + sourceMethod = "CohortMethod", + likelihoodApproximation = "adaptive grid" +) +metaAnalysisCm <- esModuleSettingsCreator$createBayesianMetaAnalysis( + evidenceSynthesisAnalysisId = 1, + alpha = 0.05, + evidenceSynthesisDescription = "Bayesian random-effects alpha 0.05 - adaptive grid", + evidenceSynthesisSource = evidenceSynthesisSourceCm +) +evidenceSynthesisSourceSccs <- esModuleSettingsCreator$createEvidenceSynthesisSource( + sourceMethod = "SelfControlledCaseSeries", + likelihoodApproximation = "adaptive grid" +) +metaAnalysisSccs <- esModuleSettingsCreator$createBayesianMetaAnalysis( + evidenceSynthesisAnalysisId = 2, + alpha = 0.05, + evidenceSynthesisDescription = "Bayesian random-effects alpha 0.05 - adaptive grid", + evidenceSynthesisSource = evidenceSynthesisSourceSccs +) +evidenceSynthesisAnalysisList <- list(metaAnalysisCm, metaAnalysisSccs) +evidenceSynthesisAnalysisSpecifications <- esModuleSettingsCreator$createModuleSpecifications( + evidenceSynthesisAnalysisList +) +esAnalysisSpecifications <- Strategus::createEmptyAnalysisSpecificiations() |> + Strategus::addModuleSpecifications(evidenceSynthesisAnalysisSpecifications) + 
+ParallelLogger::saveSettingsToJson(esAnalysisSpecifications, file.path(outputFolder, "evidenceSynthesisAnalysisSpecifications.json")) + + +resultsExecutionSettings <- Strategus::createResultsExecutionSettings( + resultsDatabaseSchema = "main", + resultsFolder = file.path(outputFolder, "evidence_sythesis", "results_folder"), + workFolder = file.path(outputFolder, "evidence_sythesis", "work_folder") +) + +Strategus::execute( + analysisSpecifications = esAnalysisSpecifications, + executionSettings = resultsExecutionSettings, + connectionDetails = resultsConnectionDetails +) + +resultsDataModelSettings <- Strategus::createResultsDataModelSettings( + resultsDatabaseSchema = "main", + resultsFolder = resultsExecutionSettings$resultsFolder, +) + +Strategus::createResultDataModel( + analysisSpecifications = esAnalysisSpecifications, + resultsDataModelSettings = resultsDataModelSettings, + resultsConnectionDetails = resultsConnectionDetails +) + +Strategus::uploadResults( + analysisSpecifications = esAnalysisSpecifications, + resultsDataModelSettings = resultsDataModelSettings, + resultsConnectionDetails = resultsConnectionDetails +) + +# Review results -------------------------- +library(ShinyAppBuilder) #NOTE: remotes::install_github("OHDSI/ShinyAppBuilder", ref="estimation") +library(OhdsiShinyModules) #NOTE: remotes::install_github("OHDSI/OhdsiShinyModules", ref="estimation-updated") +# ADD OR REMOVE MODULES TAILORED TO YOUR STUDY +shinyConfig <- initializeModuleConfig() |> + addModuleConfig( + createDefaultAboutConfig() + ) |> + addModuleConfig( + createDefaultDatasourcesConfig() + ) |> + addModuleConfig( + createDefaultCohortGeneratorConfig() + ) |> + addModuleConfig( + createDefaultCohortDiagnosticsConfig() + ) |> + addModuleConfig( + createDefaultCharacterizationConfig() + ) |> + addModuleConfig( + createDefaultPredictionConfig() + ) |> + addModuleConfig( + createDefaultEstimationConfig() + ) + +# now create the shiny app based on the config file and view the 
results +# based on the connection +ShinyAppBuilder::createShinyApp( + config = shinyConfig, + connectionDetails = resultsConnectionDetails, + resultDatabaseSettings = createDefaultResultDatabaseSettings(schema = "main"), + title = "Celecoxib vs. Diclofenac for the risk of GI Bleed", + studyDescription = "This study is showcasing the capabilities of running Strategus on Eunomia." +) diff --git a/extras/TestAnalysisSpecifications.json b/extras/TestAnalysisSpecifications.json deleted file mode 100644 index 80d29713..00000000 --- a/extras/TestAnalysisSpecifications.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "sharedResources": [], - "moduleSpecifications": [ - { - "module": "TestModule1", - "version": "0.0.1", - "settings": { - "dataSetName": "cars" - }, - "attr_class": ["TestModule1Specifications", "ModuleSpecifications"] - } - ], - "attr_class": "AnalysisSpecifications" -} diff --git a/extras/TestExample.R b/extras/TestExample.R deleted file mode 100644 index 10bb735b..00000000 --- a/extras/TestExample.R +++ /dev/null @@ -1,51 +0,0 @@ -library(Strategus) -library(dplyr) - - -# Create analysis specifications --------------------------------------------- - -# Note: maybe this function should live in the module?
-createTestModule1Specifications <- function(dataSetName = "cars") { - specifications <- list(module = "TestModule1", - version = "0.0.1", - settings = list(dataSetName = dataSetName)) - class(specifications) <- c("TestModule1Specifications", "ModuleSpecifications") - return(specifications) -} - -analysisSpecifications <- createEmptyAnalysisSpecificiations() %>% - addModuleSpecifications(createTestModule1Specifications()) - -ParallelLogger::saveSettingsToJson(analysisSpecifications, "extras/testAnalysisSpecifications.json") - - -# Create execution settings ----------------------------------------------------- -connectionDetailsReference <- "prod-1-mdcd" - -# Note: Need to do only once: store connection details in keyring: -connectionDetails <- DatabaseConnector::createConnectionDetails(dbms = "redshift", - connectionString = keyring::key_get("redShiftConnectionStringOhdaMdcd"), - user = keyring::key_get("redShiftUserName"), - password = keyring::key_get("redShiftPassword")) - -storeConnectionDetails(connectionDetails = connectionDetails, - connectionDetailsReference = connectionDetailsReference) - -executionSettings <- createCdmExecutionSettings(connectionDetailsReference = connectionDetailsReference, - workDatabaseSchema = "scratch_mschuemi", - cdmDatabaseSchema = "cdm_truven_mdcd_v1978", - cohortTableNames = CohortGenerator::getCohortTableNames(cohortTable = "strategus_test"), - workFolder = "c:/temp/strategusWork", - resultsFolder = "c:/temp/strategusOutput", - minCellCount = 5) - -ParallelLogger::saveSettingsToJson(executionSettings, "extras/testExecutionSettings.json") - -# Execute analyses ------------------------------------------------------------- - -# Note: this environmental variable should be set once for each compute node -Sys.setenv("INSTANTIATED_MODULES_FOLDER" = "c:/temp/StrategusInstantiatedModules") -unlink("_targets", recursive = TRUE) - -execute(analysisSpecifications = analysisSpecifications, - executionSettings = executionSettings) diff --git 
a/extras/TestModule1-0.0.1/.Rprofile b/extras/TestModule1-0.0.1/.Rprofile deleted file mode 100644 index 81b960f5..00000000 --- a/extras/TestModule1-0.0.1/.Rprofile +++ /dev/null @@ -1 +0,0 @@ -source("renv/activate.R") diff --git a/extras/TestModule1-0.0.1/.renvignore b/extras/TestModule1-0.0.1/.renvignore deleted file mode 100644 index 8b4af754..00000000 --- a/extras/TestModule1-0.0.1/.renvignore +++ /dev/null @@ -1,4 +0,0 @@ -SettingsFunctions.R - -extras/ -/tests/ diff --git a/extras/TestModule1-0.0.1/Main.R b/extras/TestModule1-0.0.1/Main.R deleted file mode 100644 index f4b95422..00000000 --- a/extras/TestModule1-0.0.1/Main.R +++ /dev/null @@ -1,132 +0,0 @@ -# Copyright 2024 Observational Health Data Sciences and Informatics -# -# This file is part of Strategus -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# Adding library references that are required for Strategus -library(CohortGenerator) -library(DatabaseConnector) -library(keyring) -library(ParallelLogger) -library(SqlRender) - -# Adding RSQLite so that we can test modules with Eunomia -library(RSQLite) - -execute <- function(jobContext) { - rlang::inform("Validating inputs") - checkmate::assert_list(x = jobContext) - if (is.null(jobContext$settings)) { - stop("Analysis settings not found in job context") - } - if (is.null(jobContext$sharedResources)) { - stop("Shared resources not found in job context") - } - if (is.null(jobContext$moduleExecutionSettings)) { - stop("Execution settings not found in job context") - } - - rlang::inform("Executing") - # Establish the connection and ensure the cleanup is performed - connection <- DatabaseConnector::connect(jobContext$moduleExecutionSettings$connectionDetails) - on.exit(DatabaseConnector::disconnect(connection)) - - sql <- "CREATE TABLE #Codesets ( - codeset_id int NOT NULL, - concept_id bigint NOT NULL - ) - ; - - INSERT INTO #Codesets (codeset_id, concept_id) - SELECT 0 as codeset_id, c.concept_id - FROM @cdm_database_schema.CONCEPT c - WHERE c.concept_id = 0 - ;" - - DatabaseConnector::renderTranslateExecuteSql( - connection = connection, - sql = sql, - tempEmulationSchema = jobContext$moduleExecutionSettings$tempEmulationSchema, - cdm_database_schema = jobContext$moduleExecutionSettings$cdmDatabaseSchema - ) - - data <- DatabaseConnector::renderTranslateQuerySql( - connection = connection, - sql = "SELECT * FROM #Codesets;", - tempEmulationSchema = jobContext$moduleExecutionSettings$tempEmulationSchema - ) - - DatabaseConnector::renderTranslateExecuteSql( - connection = connection, - sql = "TRUNCATE TABLE #Codesets; DROP TABLE #Codesets;", - tempEmulationSchema = jobContext$moduleExecutionSettings$tempEmulationSchema - ) - - message("Exporting data") - moduleInfo <- getModuleInfo() - resultsFolder <- jobContext$moduleExecutionSettings$resultsSubFolder - 
fileName <- file.path(resultsFolder, paste0(moduleInfo$TablePrefix, "unit_test.csv")) - readr::write_csv(data, fileName) - - # Set the table names in resultsDataModelSpecification.csv - moduleInfo <- getModuleInfo() - resultsDataModel <- CohortGenerator::readCsv( - file = "resultsDataModelSpecification.csv", - warnOnCaseMismatch = FALSE - ) - resultsDataModel$tableName <- paste0(moduleInfo$TablePrefix, resultsDataModel$tableName) - CohortGenerator::writeCsv( - x = resultsDataModel, - file = file.path(resultsFolder, "resultsDataModelSpecification.csv"), - warnOnCaseMismatch = FALSE, - warnOnFileNameCaseMismatch = FALSE, - warnOnUploadRuleViolations = FALSE - ) - - ParallelLogger::logTrace("Finished TestModule1") -} - -createDataModelSchema <- function(jobContext) { - checkmate::assert_class(jobContext$moduleExecutionSettings$resultsConnectionDetails, "ConnectionDetails") - checkmate::assert_string(jobContext$moduleExecutionSettings$resultsDatabaseSchema) - connectionDetails <- jobContext$moduleExecutionSettings$resultsConnectionDetails - moduleInfo <- getModuleInfo() - tablePrefix <- moduleInfo$TablePrefix - resultsDatabaseSchema <- jobContext$moduleExecutionSettings$resultsDatabaseSchema - resultsDataModel <- ResultModelManager::loadResultsDataModelSpecifications( - filePath = "resultsDataModelSpecification.csv" - ) - resultsDataModel$tableName <- paste0(tablePrefix, resultsDataModel$tableName) - sql <- ResultModelManager::generateSqlSchema( - schemaDefinition = resultsDataModel - ) - sql <- SqlRender::render( - sql = sql, - database_schema = resultsDatabaseSchema - ) - connection <- DatabaseConnector::connect( - connectionDetails = connectionDetails - ) - on.exit(DatabaseConnector::disconnect(connection)) - DatabaseConnector::executeSql( - connection = connection, - sql = sql - ) -} - -# Private methods ------------------------- -getModuleInfo <- function() { - checkmate::assert_file_exists("MetaData.json") - 
return(ParallelLogger::loadSettingsFromJson("MetaData.json")) -} diff --git a/extras/TestModule1-0.0.1/MetaData.json b/extras/TestModule1-0.0.1/MetaData.json deleted file mode 100644 index 02bf1fa8..00000000 --- a/extras/TestModule1-0.0.1/MetaData.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "Name": "TestModule1", - "Version": "0.0.1", - "Dependencies": [], - "TablePrefix": "tm1_" -} diff --git a/extras/TestModule1-0.0.1/SettingsFunctions.R b/extras/TestModule1-0.0.1/SettingsFunctions.R deleted file mode 100644 index a5ecbf83..00000000 --- a/extras/TestModule1-0.0.1/SettingsFunctions.R +++ /dev/null @@ -1,20 +0,0 @@ -#' Create specifications for the TestModule1 -#' -#' @return -#' An object of type `TestModule1Specifications`. -#' -#' @export -createTestModule1Specifications <- function() { - analysis <- list() - for (name in names(formals(createTestModule1Specifications))) { - analysis[[name]] <- get(name) - } - - specifications <- list( - module = "TestModule1", - version = "0.0.1", - settings = analysis - ) - class(specifications) <- c("TestModule1Specifications", "ModuleSpecifications") - return(specifications) -} diff --git a/extras/TestModule1-0.0.1/TestModule1.Rproj b/extras/TestModule1-0.0.1/TestModule1.Rproj deleted file mode 100644 index d64e28bb..00000000 --- a/extras/TestModule1-0.0.1/TestModule1.Rproj +++ /dev/null @@ -1,16 +0,0 @@ -Version: 1.0 - -RestoreWorkspace: Default -SaveWorkspace: Default -AlwaysSaveHistory: Default - -EnableCodeIndexing: Yes -UseSpacesForTab: Yes -NumSpacesForTab: 2 -Encoding: UTF-8 - -RnwWeave: knitr -LaTeX: pdfLaTeX - -AutoAppendNewline: Yes -StripTrailingWhitespace: Yes diff --git a/extras/TestModule1-0.0.1/extras/CreateJobContextForTesting.R b/extras/TestModule1-0.0.1/extras/CreateJobContextForTesting.R deleted file mode 100644 index 15c6dbe3..00000000 --- a/extras/TestModule1-0.0.1/extras/CreateJobContextForTesting.R +++ /dev/null @@ -1,46 +0,0 @@ -# Create a job context for testing purposes -library(Strategus) 
-library(dplyr) -source("SettingsFunctions.R") - -# Generic Helpers ---------------------------- -getModuleInfo <- function() { - checkmate::assert_file_exists("MetaData.json") - return(ParallelLogger::loadSettingsFromJson("MetaData.json")) -} - -# Sample Data Helpers ---------------------------- -testModule1Specifications <- createTestModule1Specifications() - -# Module Settings Spec ---------------------------- -analysisSpecifications <- createEmptyAnalysisSpecificiations() %>% - addModuleSpecifications(testModule1Specifications) - -executionSettings <- Strategus::createCdmExecutionSettings( - connectionDetailsReference = "dummy", - workDatabaseSchema = "main", - cdmDatabaseSchema = "main", - cohortTableNames = CohortGenerator::getCohortTableNames(cohortTable = "cohort"), - workFolder = "dummy", - resultsFolder = "dummy", - minCellCount = 5 -) - -# Job Context ---------------------------- -module <- "TestModule1" -moduleIndex <- 1 -moduleExecutionSettings <- executionSettings -moduleExecutionSettings$workSubFolder <- "dummy" -moduleExecutionSettings$resultsSubFolder <- "dummy" -moduleExecutionSettings$databaseId <- 123 -jobContext <- list( - sharedResources = analysisSpecifications$sharedResources, - settings = analysisSpecifications$moduleSpecifications[[moduleIndex]]$settings, - moduleExecutionSettings = moduleExecutionSettings -) -saveRDS(jobContext, "tests/testJobContext.rds") - -ParallelLogger::saveSettingsToJson( - object = analysisSpecifications, - fileName = "../../../inst/testdata/testModuleAnalysisSpecification.json" -) diff --git a/extras/TestModule1-0.0.1/extras/ModuleMaintenance.R b/extras/TestModule1-0.0.1/extras/ModuleMaintenance.R deleted file mode 100644 index 7b05233d..00000000 --- a/extras/TestModule1-0.0.1/extras/ModuleMaintenance.R +++ /dev/null @@ -1,241 +0,0 @@ -# Copyright 2024 Observational Health Data Sciences and Informatics -# -# This file is part of CohortGeneratorModule -# -# Licensed under the Apache License, Version 2.0 (the 
"License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Format and check code: -styler::style_dir() -OhdsiRTools::updateCopyrightYearFolder(path = ".", recursive = F) -OhdsiRTools::updateCopyrightYearFolder(path = "./extras", recursive = F) -OhdsiRTools::findNonAsciiStringsInFolder() - -# Generate renv lock file for default & dev profiles ------------ -# updatedPackages and updatedDevPackages are those packages that either -# 1. Cannot be synced between the HADES-wide lock file and the project lock files -# since they are not in semver format -# 2. Updates to HADES packages -# NOTE: Mandatory Strategus dependencies: -# CohortGenerator -# DatabaseConnector -# keyring -# ParallelLogger -# SqlRender -# are explicitly included in Main.R as library calls to allow renv's init() -# function to find them and include them even if they are not used in the -# module code. - -updatedPackages <- list( - list( - Package = "askpass", - Version = "1.2.0", - Source = "Repository", - Repository = "CRAN" - ), - "OHDSI/CohortGenerator@v0.8.1", - "OHDSI/ResultModelManager@v0.5.6" -) -updatedDevPackages <- list( - list( - Package = "evaluate", - Version = "0.22", - Source = "Repository", - Repository = "CRAN" - ), - "OHDSI/Eunomia@v1.0.2" -) - -# Deactivates and cleans the project to remove any/all old references -renv::deactivate(clean = TRUE) - -# Initialize the default profile --------- -renv::activate(profile = NULL) -# Use the implicit option so renv crawls the full project. 
-renv::init(settings = renv::settings$snapshot.type("implicit")) -# Record the explicit package versions mentioned above -renv::record(updatedPackages) -# Force a restore for the default profile -renv::restore(prompt = FALSE) - -# Initialize the dev profile ------------ -renv::activate(profile = "dev") # Creates a new profile called "dev" for development - -# Remove the "tests" directory from the .renvignore -# so the test dependencies are included in the dev lock file -file.copy(".renvignore", ".renvignore-backup", overwrite = TRUE) -ignoreFileContents <- readLines(".renvignore") -ignoreFileContents <- ignoreFileContents[!grepl("/tests/", ignoreFileContents)] -writeLines(ignoreFileContents, ".renvignore") - -# Capture the dependencies -renv::init(profile = "dev") # Init the 'dev' profile renv.lock with the explicit DESCRIPTION references - -# Record the updated packages -renv::record(c(updatedPackages, updatedDevPackages), lockfile = "renv/profiles/dev/renv.lock") - -# Force a restore for the dev profile -renv::restore(prompt = FALSE) - -# Restore the original .renvignore -unlink(".renvignore") -file.rename(".renvignore-backup", ".renvignore") - -# Re-activate the default profile - the dev profile is only used for unit tests -renv::activate(profile = NULL) # Sets the default profile as the default for the project - -# Sync lock files with HADES-wide lock file -------------- -hadesWideLockFileName <- normalizePath("hades-wide.lock") -unlink(hadesWideLockFileName) -utils::download.file( - url = "https://raw.githubusercontent.com/OHDSI/Hades/main/hadesWideReleases/2023Q3/renv.lock", - destfile = hadesWideLockFileName -) -# Verify the package versions across lock files -compareLockFiles <- function(filename1, filename2) { - # Read the lock files - lockfile1 <- renv::lockfile_read( - file = filename1 - ) - print(normalizePath(filename2)) - lockfile2 <- renv::lockfile_read( - file = filename2 - ) - # internal function to read lock file into data frame - 
lockFileToDataFrame <- function(lf) { - df <- data.frame() - for (i in 1:length(lf$Packages)) { - df <- rbind( - df, - data.frame( - Name = lf$Packages[[i]]$Package, - Version = lf$Packages[[i]]$Version, - RemoteRef = ifelse(is.null(lf$Packages[[i]]$RemoteRef), yes = NA, no = lf$Packages[[i]]$RemoteRef) - ) - ) - } - return(df) - } - lockfile1Packages <- lockFileToDataFrame(lockfile1) - names(lockfile1Packages) <- paste0("lockfile1", names(lockfile1Packages)) - lockfile2Packages <- lockFileToDataFrame(lockfile2) - names(lockfile2Packages) <- paste0("lockfile2", names(lockfile2Packages)) - mergedLockFilePackages <- merge( - x = lockfile1Packages, - y = lockfile2Packages, - by.x = "lockfile1Name", - by.y = "lockfile2Name", - all = TRUE - ) - return(mergedLockFilePackages) -} - -# Compare HADES-wide lock file to the dev lock file -hwVsProjDevLockFile <- compareLockFiles( - filename1 = hadesWideLockFileName, - filename2 = "renv/profiles/dev/renv.lock" -) -hwVsProjDevLockFile[!is.na(hwVsProjDevLockFile$lockfile2Version) & hwVsProjDevLockFile$lockfile1Version != hwVsProjDevLockFile$lockfile2Version, ] - -# Compare project default lock file to the dev lock file -projDevVsProjLockFile <- compareLockFiles( - filename1 = "renv/profiles/dev/renv.lock", - filename2 = "renv.lock" -) -projDevVsProjLockFile[!is.na(projDevVsProjLockFile$lockfile2Version) & projDevVsProjLockFile$lockfile1Version != projDevVsProjLockFile$lockfile2Version, ] - -# Given a source of truth lock file, update the target -# lock file. Only replace the version in the target -# lock file if the version is newer. 
Provide a warning -# for those packages that could not be evaluated by -# version # -renv::install("semver") -syncLockFile <- function(sourceOfTruthLockFileName, targetLockFileName) { - findPackageByName <- function(list, packageName) { - index <- which(sapply(list, function(x) x$Package == packageName)) - return(index) - } - - # Read the lock files - sourceOfTruthLockFile <- renv::lockfile_read( - file = sourceOfTruthLockFileName - ) - targetLockFile <- renv::lockfile_read( - file = targetLockFileName - ) - - # Compare the lock files to get the differences in package versions - comparedLockFiles <- compareLockFiles( - filename1 = sourceOfTruthLockFileName, - filename2 = targetLockFileName - ) - verDiffs <- comparedLockFiles[!is.na(comparedLockFiles$lockfile2Version) & - comparedLockFiles$lockfile1Version != comparedLockFiles$lockfile2Version, ] - verDiffs <- verDiffs[!is.na(verDiffs$lockfile1Name), ] - - if (nrow(verDiffs) == 0) { - rlang::inform("Lock files are already in sync.") - return(invisible(NULL)) - } - - # Update the target lock file based on the source of truth - for (i in 1:nrow(verDiffs)) { - index <- findPackageByName(targetLockFile$Packages, verDiffs[i, ]$lockfile1Name) - # Can we detect if the version is greater - tryCatch(expr = { - semverPattern <- "^\\d+\\.\\d+\\.\\d+(?:-[0-9A-Za-z-]+(?:\\.[0-9A-Za-z-]+)*)?(?:\\+[0-9A-Za-z-]+)?$" - sourceOfTruthVersion <- verDiffs[i, ]$lockfile1Version - targetVersion <- targetLockFile$Packages[[index]]$Version - if (grepl(semverPattern, sourceOfTruthVersion) && grepl(semverPattern, targetVersion)) { - sourceOfTruthVersion <- semver::parse_version(sourceOfTruthVersion) - targetVersion <- semver::parse_version(targetVersion) - if (sourceOfTruthVersion > targetVersion) { - rlang::inform( - message = paste(verDiffs[i, ]$lockfile1Name, "[", targetVersion, "->", sourceOfTruthVersion, "]") - ) - targetLockFile$Packages[[index]]$Version <- verDiffs[i, ]$lockfile1Version - if (!is.na(verDiffs[i, ]$lockfile1RemoteRef)) { 
- targetLockFile$Packages[[index]]$RemoteRef <- verDiffs[i, ]$lockfile1RemoteRef - } - } else { - rlang::inform( - message = paste(verDiffs[i, ]$lockfile1Name, "[ SKIPPING - ", targetVersion, ">", sourceOfTruthVersion, "]") - ) - } - } else { - rlang::warn(paste0("Package: [", verDiffs[i, ]$lockfile1Name, "] - version number could not be parsed. Please inspect manually as it may require an upgrade.")) - } - }, error = function(err) { - rlang::inform("An error occurred:", str(err), "\n") - }) - } - - # Save the updated lock file - renv::lockfile_write( - lockfile = targetLockFile, - file = targetLockFileName - ) -} - -syncLockFile( - sourceOfTruthLockFileName = hadesWideLockFileName, - targetLockFileName = "renv/profiles/dev/renv.lock" -) - -syncLockFile( - sourceOfTruthLockFileName = "renv/profiles/dev/renv.lock", - targetLockFileName = "renv.lock" -) - -# NOTE: Use the compare functions above to verify the files are in sync -# and add any dependencies that could not be synced automatically to the -# updatedPackages and updatedDevPackages respectively. 
diff --git a/extras/TestModule1-0.0.1/hades-wide.lock b/extras/TestModule1-0.0.1/hades-wide.lock deleted file mode 100644 index 089d5ad4..00000000 --- a/extras/TestModule1-0.0.1/hades-wide.lock +++ /dev/null @@ -1,1565 +0,0 @@ -{ - "R" : { - "Version" : "4.2.3", - "Repositories" : [ - { - "Name" : "CRAN", - "URL" : "https://cloud.r-project.org" - } - ] - }, - "Packages" : { - "cli" : { - "Package" : "cli", - "Version" : "3.6.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "glue" : { - "Package" : "glue", - "Version" : "1.6.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "rlang" : { - "Package" : "rlang", - "Version" : "1.1.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "lifecycle" : { - "Package" : "lifecycle", - "Version" : "1.0.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "fansi" : { - "Package" : "fansi", - "Version" : "1.0.4", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "ps" : { - "Package" : "ps", - "Version" : "1.7.5", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "R6" : { - "Package" : "R6", - "Version" : "2.5.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "utf8" : { - "Package" : "utf8", - "Version" : "1.2.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "vctrs" : { - "Package" : "vctrs", - "Version" : "0.6.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "base64enc" : { - "Package" : "base64enc", - "Version" : "0.1-3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "digest" : { - "Package" : "digest", - "Version" : "0.6.33", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "ellipsis" : { - "Package" : "ellipsis", - "Version" : "0.3.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "fastmap" : { - "Package" : "fastmap", - "Version" : "1.1.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "magrittr" : { - "Package" : "magrittr", - "Version" : "2.0.3", - "Source" : "Repository", - 
"Repository" : "CRAN" - }, - "pillar" : { - "Package" : "pillar", - "Version" : "1.9.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "pkgconfig" : { - "Package" : "pkgconfig", - "Version" : "2.0.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "processx" : { - "Package" : "processx", - "Version" : "3.8.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "rprojroot" : { - "Package" : "rprojroot", - "Version" : "2.0.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "withr" : { - "Package" : "withr", - "Version" : "2.5.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "bit" : { - "Package" : "bit", - "Version" : "4.0.5", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "cachem" : { - "Package" : "cachem", - "Version" : "1.0.8", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "callr" : { - "Package" : "callr", - "Version" : "3.7.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "cpp11" : { - "Package" : "cpp11", - "Version" : "0.4.6", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "crayon" : { - "Package" : "crayon", - "Version" : "1.5.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "desc" : { - "Package" : "desc", - "Version" : "1.4.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "fs" : { - "Package" : "fs", - "Version" : "1.6.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "generics" : { - "Package" : "generics", - "Version" : "0.1.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "hms" : { - "Package" : "hms", - "Version" : "1.1.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "htmltools" : { - "Package" : "htmltools", - "Version" : "0.5.6", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "prettyunits" : { - "Package" : "prettyunits", - "Version" : "1.2.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "rappdirs" : { - "Package" : "rappdirs", - "Version" : "0.3.3", - "Source" : 
"Repository", - "Repository" : "CRAN" - }, - "Rcpp" : { - "Package" : "Rcpp", - "Version" : "1.0.11", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "stringi" : { - "Package" : "stringi", - "Version" : "1.7.12", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "tibble" : { - "Package" : "tibble", - "Version" : "3.2.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "tidyselect" : { - "Package" : "tidyselect", - "Version" : "1.2.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "xfun" : { - "Package" : "xfun", - "Version" : "0.40", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "backports" : { - "Package" : "backports", - "Version" : "1.4.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "bit64" : { - "Package" : "bit64", - "Version" : "4.0.5", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "colorspace" : { - "Package" : "colorspace", - "Version" : "2.1-0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "diffobj" : { - "Package" : "diffobj", - "Version" : "0.3.5", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "dplyr" : { - "Package" : "dplyr", - "Version" : "1.1.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "evaluate" : { - "Package" : "evaluate", - "Version" : "0.22", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "highr" : { - "Package" : "highr", - "Version" : "0.10", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "jquerylib" : { - "Package" : "jquerylib", - "Version" : "0.1.4", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "jsonlite" : { - "Package" : "jsonlite", - "Version" : "1.8.7", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "later" : { - "Package" : "later", - "Version" : "1.3.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "lattice" : { - "Package" : "lattice", - "Version" : "0.20-45", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "memoise" : { - "Package" : "memoise", 
- "Version" : "2.0.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "mime" : { - "Package" : "mime", - "Version" : "0.12", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "pkgbuild" : { - "Package" : "pkgbuild", - "Version" : "1.4.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "progress" : { - "Package" : "progress", - "Version" : "1.2.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "purrr" : { - "Package" : "purrr", - "Version" : "1.0.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "rematch2" : { - "Package" : "rematch2", - "Version" : "2.1.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "sass" : { - "Package" : "sass", - "Version" : "0.4.7", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "stringr" : { - "Package" : "stringr", - "Version" : "1.5.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "sys" : { - "Package" : "sys", - "Version" : "3.4.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "tzdb" : { - "Package" : "tzdb", - "Version" : "0.4.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "yaml" : { - "Package" : "yaml", - "Version" : "2.3.7", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "askpass" : { - "Package" : "askpass", - "Version" : "1.2.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "blob" : { - "Package" : "blob", - "Version" : "1.2.4", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "brio" : { - "Package" : "brio", - "Version" : "1.1.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "bslib" : { - "Package" : "bslib", - "Version" : "0.5.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "checkmate" : { - "Package" : "checkmate", - "Version" : "2.2.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "clipr" : { - "Package" : "clipr", - "Version" : "0.8.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "DBI" : { - "Package" : "DBI", - "Version" : 
"1.1.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "farver" : { - "Package" : "farver", - "Version" : "2.1.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "fontawesome" : { - "Package" : "fontawesome", - "Version" : "0.5.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "knitr" : { - "Package" : "knitr", - "Version" : "1.44", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "labeling" : { - "Package" : "labeling", - "Version" : "0.4.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "Matrix" : { - "Package" : "Matrix", - "Version" : "1.6-0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "munsell" : { - "Package" : "munsell", - "Version" : "0.5.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "nlme" : { - "Package" : "nlme", - "Version" : "3.1-162", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "pkgload" : { - "Package" : "pkgload", - "Version" : "1.3.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "plogr" : { - "Package" : "plogr", - "Version" : "0.2.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "praise" : { - "Package" : "praise", - "Version" : "1.0.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "promises" : { - "Package" : "promises", - "Version" : "1.2.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "RColorBrewer" : { - "Package" : "RColorBrewer", - "Version" : "1.1-3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "rJava" : { - "Package" : "rJava", - "Version" : "1.0-6", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "tidyr" : { - "Package" : "tidyr", - "Version" : "1.3.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "tinytex" : { - "Package" : "tinytex", - "Version" : "0.47", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "triebeard" : { - "Package" : "triebeard", - "Version" : "0.4.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "viridisLite" 
: { - "Package" : "viridisLite", - "Version" : "0.4.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "vroom" : { - "Package" : "vroom", - "Version" : "1.6.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "waldo" : { - "Package" : "waldo", - "Version" : "0.5.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "zoo" : { - "Package" : "zoo", - "Version" : "1.8-12", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "BH" : { - "Package" : "BH", - "Version" : "1.81.0-1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "commonmark" : { - "Package" : "commonmark", - "Version" : "1.9.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "curl" : { - "Package" : "curl", - "Version" : "5.1.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "dbplyr" : { - "Package" : "dbplyr", - "Version" : "2.3.4", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "gtable" : { - "Package" : "gtable", - "Version" : "0.3.4", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "httpuv" : { - "Package" : "httpuv", - "Version" : "1.6.11", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "isoband" : { - "Package" : "isoband", - "Version" : "0.2.7", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "lazyeval" : { - "Package" : "lazyeval", - "Version" : "0.2.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "MASS" : { - "Package" : "MASS", - "Version" : "7.3-58.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "mathjaxr" : { - "Package" : "mathjaxr", - "Version" : "1.6-0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "mgcv" : { - "Package" : "mgcv", - "Version" : "1.8-42", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "openssl" : { - "Package" : "openssl", - "Version" : "2.1.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "readr" : { - "Package" : "readr", - "Version" : "2.1.4", - "Source" : "Repository", - "Repository" : "CRAN" - }, - 
"rmarkdown" : { - "Package" : "rmarkdown", - "Version" : "2.25", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "RSQLite" : { - "Package" : "RSQLite", - "Version" : "2.3.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "scales" : { - "Package" : "scales", - "Version" : "1.2.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "snow" : { - "Package" : "snow", - "Version" : "0.4-4", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "sourcetools" : { - "Package" : "sourcetools", - "Version" : "0.1.7-1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "SqlRender" : { - "Package" : "SqlRender", - "Version" : "1.16.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "testthat" : { - "Package" : "testthat", - "Version" : "3.1.10", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "urltools" : { - "Package" : "urltools", - "Version" : "1.7.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "xml2" : { - "Package" : "xml2", - "Version" : "1.3.5", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "xtable" : { - "Package" : "xtable", - "Version" : "1.8-4", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "xts" : { - "Package" : "xts", - "Version" : "0.13.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "zip" : { - "Package" : "zip", - "Version" : "2.3.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "Andromeda" : { - "Package" : "Andromeda", - "Version" : "0.6.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "anytime" : { - "Package" : "anytime", - "Version" : "0.3.9", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "boot" : { - "Package" : "boot", - "Version" : "1.3-28.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "crosstalk" : { - "Package" : "crosstalk", - "Version" : "1.2.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "data.table" : { - "Package" : "data.table", - "Version" : "1.14.8", - "Source" : 
"Repository", - "Repository" : "CRAN" - }, - "DatabaseConnector" : { - "Package" : "DatabaseConnector", - "Version" : "6.2.4", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "formatR" : { - "Package" : "formatR", - "Version" : "1.14", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "ggplot2" : { - "Package" : "ggplot2", - "Version" : "3.4.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "here" : { - "Package" : "here", - "Version" : "1.0.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "htmlwidgets" : { - "Package" : "htmlwidgets", - "Version" : "1.6.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "httr" : { - "Package" : "httr", - "Version" : "1.4.7", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "metadat" : { - "Package" : "metadat", - "Version" : "1.2-0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "minqa" : { - "Package" : "minqa", - "Version" : "1.2.6", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "nloptr" : { - "Package" : "nloptr", - "Version" : "2.0.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "numDeriv" : { - "Package" : "numDeriv", - "Version" : "2016.8-1.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "ParallelLogger" : { - "Package" : "ParallelLogger", - "Version" : "3.3.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "pbapply" : { - "Package" : "pbapply", - "Version" : "1.7-2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "plyr" : { - "Package" : "plyr", - "Version" : "1.8.9", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "png" : { - "Package" : "png", - "Version" : "0.1-8", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "RcppEigen" : { - "Package" : "RcppEigen", - "Version" : "0.3.3.9.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "RcppTOML" : { - "Package" : "RcppTOML", - "Version" : "0.2.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "reactR" 
: { - "Package" : "reactR", - "Version" : "0.4.4", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "RJSONIO" : { - "Package" : "RJSONIO", - "Version" : "1.3-1.8", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "shiny" : { - "Package" : "shiny", - "Version" : "1.7.5", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "survival" : { - "Package" : "survival", - "Version" : "3.5-3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "timechange" : { - "Package" : "timechange", - "Version" : "0.2.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "TTR" : { - "Package" : "TTR", - "Version" : "0.24.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "assertthat" : { - "Package" : "assertthat", - "Version" : "0.2.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "CompQuadForm" : { - "Package" : "CompQuadForm", - "Version" : "1.4.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "cowplot" : { - "Package" : "cowplot", - "Version" : "1.1.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "Cyclops" : { - "Package" : "Cyclops", - "Version" : "3.3.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "distributional" : { - "Package" : "distributional", - "Version" : "0.3.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "DT" : { - "Package" : "DT", - "Version" : "0.30", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "filelock" : { - "Package" : "filelock", - "Version" : "1.0.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "futile.options" : { - "Package" : "futile.options", - "Version" : "1.0.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "gridExtra" : { - "Package" : "gridExtra", - "Version" : "2.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "lambda.r" : { - "Package" : "lambda.r", - "Version" : "1.2.4", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "lme4" : { - "Package" : "lme4", - "Version" : 
"1.1-34", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "lubridate" : { - "Package" : "lubridate", - "Version" : "1.9.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "markdown" : { - "Package" : "markdown", - "Version" : "1.9", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "memuse" : { - "Package" : "memuse", - "Version" : "4.2-3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "metafor" : { - "Package" : "metafor", - "Version" : "4.4-0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "plotly" : { - "Package" : "plotly", - "Version" : "4.10.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "polspline" : { - "Package" : "polspline", - "Version" : "1.1.23", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "pool" : { - "Package" : "pool", - "Version" : "1.0.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "pROC" : { - "Package" : "pROC", - "Version" : "1.18.4", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "PRROC" : { - "Package" : "PRROC", - "Version" : "1.3.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "quadprog" : { - "Package" : "quadprog", - "Version" : "1.5-8", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "quantmod" : { - "Package" : "quantmod", - "Version" : "0.4.25", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "reactable" : { - "Package" : "reactable", - "Version" : "0.4.4", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "reticulate" : { - "Package" : "reticulate", - "Version" : "1.32.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "shinycssloaders" : { - "Package" : "shinycssloaders", - "Version" : "1.0.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "shinydashboard" : { - "Package" : "shinydashboard", - "Version" : "0.7.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "shinyWidgets" : { - "Package" : "shinyWidgets", - "Version" : "0.8.0", - "Source" : 
"Repository", - "Repository" : "CRAN" - }, - "sodium" : { - "Package" : "sodium", - "Version" : "1.3.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "tippy" : { - "Package" : "tippy", - "Version" : "0.1.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "BeastJar" : { - "Package" : "BeastJar", - "Version" : "1.10.6", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "clock" : { - "Package" : "clock", - "Version" : "0.7.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "coda" : { - "Package" : "coda", - "Version" : "0.19-4", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "EmpiricalCalibration" : { - "Package" : "EmpiricalCalibration", - "Version" : "3.1.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "futile.logger" : { - "Package" : "futile.logger", - "Version" : "1.4.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "ggdist" : { - "Package" : "ggdist", - "Version" : "3.3.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "HDInterval" : { - "Package" : "HDInterval", - "Version" : "0.2.4", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "keyring" : { - "Package" : "keyring", - "Version" : "1.3.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "meta" : { - "Package" : "meta", - "Version" : "6.5-0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "openxlsx" : { - "Package" : "openxlsx", - "Version" : "4.2.5.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "rateratio.test" : { - "Package" : "rateratio.test", - "Version" : "1.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "remotes" : { - "Package" : "remotes", - "Version" : "2.4.2.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "tseries" : { - "Package" : "tseries", - "Version" : "0.10-54", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "Achilles" : { - "Package" : "Achilles", - "Version" : "1.7.2", - "Source" : "Repository", - "Repository" : 
"CRAN" - }, - "BrokenAdaptiveRidge" : { - "Package" : "BrokenAdaptiveRidge", - "Version" : "1.0.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "CohortExplorer" : { - "Package" : "CohortExplorer", - "Version" : "0.0.17", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "EvidenceSynthesis" : { - "Package" : "EvidenceSynthesis", - "Version" : "0.5.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "IterativeHardThresholding" : { - "Package" : "IterativeHardThresholding", - "Version" : "1.0.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "lightgbm" : { - "Package" : "lightgbm", - "Version" : "3.3.5", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "broom" : { - "Package" : "broom", - "Version" : "1.0.5", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "MatrixModels" : { - "Package" : "MatrixModels", - "Version" : "0.5-2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "SparseM" : { - "Package" : "SparseM", - "Version" : "1.81", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "abind" : { - "Package" : "abind", - "Version" : "1.4-5", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "carData" : { - "Package" : "carData", - "Version" : "3.0-5", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "nnet" : { - "Package" : "nnet", - "Version" : "7.3-18", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "pbkrtest" : { - "Package" : "pbkrtest", - "Version" : "0.5.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "quantreg" : { - "Package" : "quantreg", - "Version" : "5.97", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "car" : { - "Package" : "car", - "Version" : "3.1-2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "corrplot" : { - "Package" : "corrplot", - "Version" : "0.92", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "jpeg" : { - "Package" : "jpeg", - "Version" : "0.1-10", - "Source" : "Repository", - 
"Repository" : "CRAN" - }, - "exactRankTests" : { - "Package" : "exactRankTests", - "Version" : "0.8-35", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "ggrepel" : { - "Package" : "ggrepel", - "Version" : "0.9.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "ggsci" : { - "Package" : "ggsci", - "Version" : "3.0.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "ggsignif" : { - "Package" : "ggsignif", - "Version" : "0.6.4", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "gridtext" : { - "Package" : "gridtext", - "Version" : "0.1.5", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "km.ci" : { - "Package" : "km.ci", - "Version" : "0.5-6", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "KMsurv" : { - "Package" : "KMsurv", - "Version" : "0.1-5", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "mvtnorm" : { - "Package" : "mvtnorm", - "Version" : "1.2-3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "polynom" : { - "Package" : "polynom", - "Version" : "1.4-1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "rstatix" : { - "Package" : "rstatix", - "Version" : "0.7.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "ggpubr" : { - "Package" : "ggpubr", - "Version" : "0.6.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "ggtext" : { - "Package" : "ggtext", - "Version" : "0.1.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "maxstat" : { - "Package" : "maxstat", - "Version" : "0.7-25", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "survMisc" : { - "Package" : "survMisc", - "Version" : "0.5.6", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "survminer" : { - "Package" : "survminer", - "Version" : "0.4.9", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "aws.signature" : { - "Package" : "aws.signature", - "Version" : "0.6.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "aws.s3" : { - "Package" : 
"aws.s3", - "Version" : "0.3.21", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "R.methodsS3" : { - "Package" : "R.methodsS3", - "Version" : "1.8.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "R.oo" : { - "Package" : "R.oo", - "Version" : "1.25.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "R.utils" : { - "Package" : "R.utils", - "Version" : "2.12.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "renv" : { - "Package" : "renv", - "Version" : "1.0.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "CirceR" : { - "Package" : "CirceR", - "Version" : "1.3.1", - "Source" : "GitHub", - "RemoteType" : "github", - "RemoteHost" : "api.github.com", - "RemoteRepo" : "CirceR", - "RemoteUsername" : "ohdsi", - "RemoteRef" : "v1.3.1" - }, - "FeatureExtraction" : { - "Package" : "FeatureExtraction", - "Version" : "3.3.1", - "Source" : "GitHub", - "RemoteType" : "github", - "RemoteHost" : "api.github.com", - "RemoteRepo" : "FeatureExtraction", - "RemoteUsername" : "ohdsi", - "RemoteRef" : "v3.3.1" - }, - "CohortGenerator" : { - "Package" : "CohortGenerator", - "Version" : "0.8.0", - "Source" : "GitHub", - "RemoteType" : "github", - "RemoteHost" : "api.github.com", - "RemoteRepo" : "CohortGenerator", - "RemoteUsername" : "ohdsi", - "RemoteRef" : "v0.8.0" - }, - "OhdsiShinyModules" : { - "Package" : "OhdsiShinyModules", - "Version" : "1.1.0", - "Source" : "GitHub", - "RemoteType" : "github", - "RemoteHost" : "api.github.com", - "RemoteRepo" : "OhdsiShinyModules", - "RemoteUsername" : "ohdsi", - "RemoteRef" : "v1.1.0" - }, - "PatientLevelPrediction" : { - "Package" : "PatientLevelPrediction", - "Version" : "6.3.5", - "Source" : "GitHub", - "RemoteType" : "github", - "RemoteHost" : "api.github.com", - "RemoteRepo" : "PatientLevelPrediction", - "RemoteUsername" : "ohdsi", - "RemoteRef" : "v6.3.5" - }, - "ResultModelManager" : { - "Package" : "ResultModelManager", - "Version" : "0.5.1", - "Source" : "GitHub", - 
"RemoteType" : "github", - "RemoteHost" : "api.github.com", - "RemoteRepo" : "ResultModelManager", - "RemoteUsername" : "ohdsi", - "RemoteRef" : "v0.5.1" - }, - "BigKnn" : { - "Package" : "BigKnn", - "Version" : "1.0.2", - "Source" : "GitHub", - "RemoteType" : "github", - "RemoteHost" : "api.github.com", - "RemoteRepo" : "BigKnn", - "RemoteUsername" : "ohdsi", - "RemoteRef" : "v1.0.2" - }, - "Capr" : { - "Package" : "Capr", - "Version" : "2.0.7", - "Source" : "GitHub", - "RemoteType" : "github", - "RemoteHost" : "api.github.com", - "RemoteRepo" : "Capr", - "RemoteUsername" : "ohdsi", - "RemoteRef" : "v2.0.7" - }, - "Characterization" : { - "Package" : "Characterization", - "Version" : "0.1.1", - "Source" : "GitHub", - "RemoteType" : "github", - "RemoteHost" : "api.github.com", - "RemoteRepo" : "Characterization", - "RemoteUsername" : "ohdsi", - "RemoteRef" : "v0.1.1" - }, - "CohortDiagnostics" : { - "Package" : "CohortDiagnostics", - "Version" : "3.2.4", - "Source" : "GitHub", - "RemoteType" : "github", - "RemoteHost" : "api.github.com", - "RemoteRepo" : "CohortDiagnostics", - "RemoteUsername" : "ohdsi", - "RemoteRef" : "v3.2.4" - }, - "CohortMethod" : { - "Package" : "CohortMethod", - "Version" : "5.1.0", - "Source" : "GitHub", - "RemoteType" : "github", - "RemoteHost" : "api.github.com", - "RemoteRepo" : "CohortMethod", - "RemoteUsername" : "ohdsi", - "RemoteRef" : "v5.1.0" - }, - "DataQualityDashboard" : { - "Package" : "DataQualityDashboard", - "Version" : "2.4.0", - "Source" : "GitHub", - "RemoteType" : "github", - "RemoteHost" : "api.github.com", - "RemoteRepo" : "DataQualityDashboard", - "RemoteUsername" : "ohdsi", - "RemoteRef" : "v2.4.0" - }, - "DeepPatientLevelPrediction" : { - "Package" : "DeepPatientLevelPrediction", - "Version" : "2.0.0", - "Source" : "GitHub", - "RemoteType" : "github", - "RemoteHost" : "api.github.com", - "RemoteRepo" : "DeepPatientLevelPrediction", - "RemoteUsername" : "ohdsi", - "RemoteRef" : "v2.0.0" - }, - 
"EnsemblePatientLevelPrediction" : { - "Package" : "EnsemblePatientLevelPrediction", - "Version" : "1.0.2", - "Source" : "GitHub", - "RemoteType" : "github", - "RemoteHost" : "api.github.com", - "RemoteRepo" : "EnsemblePatientLevelPrediction", - "RemoteUsername" : "ohdsi", - "RemoteRef" : "v1.0.2" - }, - "Eunomia" : { - "Package" : "Eunomia", - "Version" : "1.0.2", - "Source" : "GitHub", - "RemoteType" : "github", - "RemoteHost" : "api.github.com", - "RemoteRepo" : "Eunomia", - "RemoteUsername" : "ohdsi", - "RemoteRef" : "v1.0.2" - }, - "Hydra" : { - "Package" : "Hydra", - "Version" : "0.4.0", - "Source" : "GitHub", - "RemoteType" : "github", - "RemoteHost" : "api.github.com", - "RemoteRepo" : "Hydra", - "RemoteUsername" : "ohdsi", - "RemoteRef" : "v0.4.0" - }, - "MethodEvaluation" : { - "Package" : "MethodEvaluation", - "Version" : "2.3.0", - "Source" : "GitHub", - "RemoteType" : "github", - "RemoteHost" : "api.github.com", - "RemoteRepo" : "MethodEvaluation", - "RemoteUsername" : "ohdsi", - "RemoteRef" : "v2.3.0" - }, - "OhdsiSharing" : { - "Package" : "OhdsiSharing", - "Version" : "0.2.2", - "Source" : "GitHub", - "RemoteType" : "github", - "RemoteHost" : "api.github.com", - "RemoteRepo" : "OhdsiSharing", - "RemoteUsername" : "ohdsi", - "RemoteRef" : "v0.2.2" - }, - "PhenotypeLibrary" : { - "Package" : "PhenotypeLibrary", - "Version" : "3.25.0", - "Source" : "GitHub", - "RemoteType" : "github", - "RemoteHost" : "api.github.com", - "RemoteRepo" : "PhenotypeLibrary", - "RemoteUsername" : "ohdsi", - "RemoteRef" : "v3.25.0" - }, - "PheValuator" : { - "Package" : "PheValuator", - "Version" : "2.2.10", - "Source" : "GitHub", - "RemoteType" : "github", - "RemoteHost" : "api.github.com", - "RemoteRepo" : "PheValuator", - "RemoteUsername" : "ohdsi", - "RemoteRef" : "v2.2.10" - }, - "ROhdsiWebApi" : { - "Package" : "ROhdsiWebApi", - "Version" : "1.3.3", - "Source" : "GitHub", - "RemoteType" : "github", - "RemoteHost" : "api.github.com", - "RemoteRepo" : "ROhdsiWebApi", - 
"RemoteUsername" : "ohdsi", - "RemoteRef" : "v1.3.3" - }, - "SelfControlledCaseSeries" : { - "Package" : "SelfControlledCaseSeries", - "Version" : "4.2.0", - "Source" : "GitHub", - "RemoteType" : "github", - "RemoteHost" : "api.github.com", - "RemoteRepo" : "SelfControlledCaseSeries", - "RemoteUsername" : "ohdsi", - "RemoteRef" : "v4.2.0" - }, - "SelfControlledCohort" : { - "Package" : "SelfControlledCohort", - "Version" : "1.6.0", - "Source" : "GitHub", - "RemoteType" : "github", - "RemoteHost" : "api.github.com", - "RemoteRepo" : "SelfControlledCohort", - "RemoteUsername" : "ohdsi", - "RemoteRef" : "v1.6.0" - }, - "ShinyAppBuilder" : { - "Package" : "ShinyAppBuilder", - "Version" : "1.1.2", - "Source" : "GitHub", - "RemoteType" : "github", - "RemoteHost" : "api.github.com", - "RemoteRepo" : "ShinyAppBuilder", - "RemoteUsername" : "ohdsi", - "RemoteRef" : "v1.1.2" - }, - "Hades" : { - "Package" : "Hades", - "Version" : "1.13.0", - "Source" : "GitHub", - "RemoteType" : "github", - "RemoteHost" : "api.github.com", - "RemoteRepo" : "Hades", - "RemoteUsername" : "ohdsi", - "RemoteRef" : "v1.13.0" - } - } -} diff --git a/extras/TestModule1-0.0.1/renv.lock b/extras/TestModule1-0.0.1/renv.lock deleted file mode 100644 index 5b9b102d..00000000 --- a/extras/TestModule1-0.0.1/renv.lock +++ /dev/null @@ -1,887 +0,0 @@ -{ - "R": { - "Version": "4.2.3", - "Repositories": [ - { - "Name": "CRAN", - "URL": "https://packagemanager.posit.co/cran/latest" - } - ] - }, - "Packages": { - "CohortGenerator": { - "Package": "CohortGenerator", - "Version": "0.8.1", - "Source": "GitHub", - "RemoteType": "github", - "RemoteHost": "api.github.com", - "RemoteUsername": "OHDSI", - "RemoteRepo": "CohortGenerator", - "RemoteRef": "v0.8.1", - "RemoteSha": "78757f1b191a395cf9dcff0d5bbe2b9fa4aa163e" - }, - "DBI": { - "Package": "DBI", - "Version": "1.1.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "methods" - ], - "Hash": "b2866e62bab9378c3cc9476a1954226b" - }, - 
"DatabaseConnector": { - "Package": "DatabaseConnector", - "Version": "6.3.2", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "DBI", - "R", - "SqlRender", - "bit64", - "checkmate", - "dbplyr", - "digest", - "methods", - "rJava", - "readr", - "rlang", - "stringr", - "urltools", - "utils" - ], - "Hash": "1ef65614602c6534a6c666e872c3b647" - }, - "ParallelLogger": { - "Package": "ParallelLogger", - "Version": "3.3.0", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "jsonlite", - "methods", - "snow", - "utils", - "xml2" - ], - "Hash": "8d893bed8c8bfe21217464dd3f9ec3e9" - }, - "R6": { - "Package": "R6", - "Version": "2.5.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R" - ], - "Hash": "470851b6d5d0ac559e9d01bb352b4021" - }, - "RJSONIO": { - "Package": "RJSONIO", - "Version": "1.3-1.8", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "methods" - ], - "Hash": "cd79d1874fb20217463451f8c310c526" - }, - "RSQLite": { - "Package": "RSQLite", - "Version": "2.3.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "DBI", - "R", - "bit64", - "blob", - "cpp11", - "memoise", - "methods", - "pkgconfig", - "plogr" - ], - "Hash": "207c90cd5438a1f596da2cd54c606fee" - }, - "Rcpp": { - "Package": "Rcpp", - "Version": "1.0.11", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "methods", - "utils" - ], - "Hash": "e749cae40fa9ef469b6050959517453c" - }, - "ResultModelManager": { - "Package": "ResultModelManager", - "Version": "0.5.6", - "Source": "GitHub", - "RemoteType": "github", - "RemoteHost": "api.github.com", - "RemoteUsername": "OHDSI", - "RemoteRepo": "ResultModelManager", - "RemoteRef": "v0.5.6", - "RemoteSha": "3033804e5af77b8b8dacda67c4d6853731e3641b" - }, - "SqlRender": { - "Package": "SqlRender", - "Version": "1.16.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "checkmate", - "rJava", - "rlang" - ], 
- "Hash": "94d9cae91bbd8aed211bea82aff7cf77" - }, - "askpass": { - "Package": "askpass", - "Version": "1.2.0", - "Source": "Repository", - "Repository": "CRAN" - }, - "assertthat": { - "Package": "assertthat", - "Version": "0.2.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "tools" - ], - "Hash": "50c838a310445e954bc13f26f26a6ecf" - }, - "backports": { - "Package": "backports", - "Version": "1.4.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R" - ], - "Hash": "c39fbec8a30d23e721980b8afb31984c" - }, - "bit": { - "Package": "bit", - "Version": "4.0.5", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R" - ], - "Hash": "d242abec29412ce988848d0294b208fd" - }, - "bit64": { - "Package": "bit64", - "Version": "4.0.5", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "bit", - "methods", - "stats", - "utils" - ], - "Hash": "9fe98599ca456d6552421db0d6772d8f" - }, - "blob": { - "Package": "blob", - "Version": "1.2.4", - "Source": "Repository", - "Repository": "RSPM", - "Requirements": [ - "methods", - "rlang", - "vctrs" - ], - "Hash": "40415719b5a479b87949f3aa0aee737c" - }, - "cachem": { - "Package": "cachem", - "Version": "1.0.8", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "fastmap", - "rlang" - ], - "Hash": "cda74447c42f529de601fe4d4050daef" - }, - "checkmate": { - "Package": "checkmate", - "Version": "2.3.0", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "backports", - "utils" - ], - "Hash": "ed4275b13c6ab74b89a31def0b6bf835" - }, - "cli": { - "Package": "cli", - "Version": "3.6.1", - "Source": "Repository", - "Repository": "RSPM", - "Requirements": [ - "R", - "utils" - ], - "Hash": "89e6d8219950eac806ae0c489052048a" - }, - "clipr": { - "Package": "clipr", - "Version": "0.8.0", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "utils" - ], - "Hash": 
"3f038e5ac7f41d4ac41ce658c85e3042" - }, - "cpp11": { - "Package": "cpp11", - "Version": "0.4.6", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "ed588261931ee3be2c700d22e94a29ab" - }, - "crayon": { - "Package": "crayon", - "Version": "1.5.2", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "grDevices", - "methods", - "utils" - ], - "Hash": "e8a1e41acf02548751f45c718d55aa6a" - }, - "dbplyr": { - "Package": "dbplyr", - "Version": "2.3.4", - "Source": "Repository", - "Repository": "RSPM", - "Requirements": [ - "DBI", - "R", - "R6", - "blob", - "cli", - "dplyr", - "glue", - "lifecycle", - "magrittr", - "methods", - "pillar", - "purrr", - "rlang", - "tibble", - "tidyr", - "tidyselect", - "utils", - "vctrs", - "withr" - ], - "Hash": "d24305b92db333726aed162a2c23a147" - }, - "digest": { - "Package": "digest", - "Version": "0.6.33", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "utils" - ], - "Hash": "8b708f296afd9ae69f450f9640be8990" - }, - "dplyr": { - "Package": "dplyr", - "Version": "1.1.3", - "Source": "Repository", - "Repository": "RSPM", - "Requirements": [ - "R", - "R6", - "cli", - "generics", - "glue", - "lifecycle", - "magrittr", - "methods", - "pillar", - "rlang", - "tibble", - "tidyselect", - "utils", - "vctrs" - ], - "Hash": "eb5742d256a0d9306d85ea68756d8187" - }, - "fansi": { - "Package": "fansi", - "Version": "1.0.4", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "grDevices", - "utils" - ], - "Hash": "1d9e7ad3c8312a192dea7d3db0274fde" - }, - "fastmap": { - "Package": "fastmap", - "Version": "1.1.1", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "f7736a18de97dea803bde0a2daaafb27" - }, - "filelock": { - "Package": "filelock", - "Version": "1.0.2", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "38ec653c2613bed60052ba3787bd8a2c" - }, - "generics": { - "Package": "generics", - "Version": "0.1.3", - "Source": "Repository", - 
"Repository": "CRAN", - "Requirements": [ - "R", - "methods" - ], - "Hash": "15e9634c0fcd294799e9b2e929ed1b86" - }, - "glue": { - "Package": "glue", - "Version": "1.6.2", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "methods" - ], - "Hash": "4f2596dfb05dac67b9dc558e5c6fba2e" - }, - "hms": { - "Package": "hms", - "Version": "1.1.3", - "Source": "Repository", - "Repository": "RSPM", - "Requirements": [ - "lifecycle", - "methods", - "pkgconfig", - "rlang", - "vctrs" - ], - "Hash": "b59377caa7ed00fa41808342002138f9" - }, - "jsonlite": { - "Package": "jsonlite", - "Version": "1.8.7", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "methods" - ], - "Hash": "a4269a09a9b865579b2635c77e572374" - }, - "keyring": { - "Package": "keyring", - "Version": "1.3.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R6", - "askpass", - "assertthat", - "filelock", - "openssl", - "rappdirs", - "sodium", - "tools", - "utils", - "yaml" - ], - "Hash": "b7880ebefe188d62b099673bbc04afac" - }, - "later": { - "Package": "later", - "Version": "1.3.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "Rcpp", - "rlang" - ], - "Hash": "7e7b457d7766bc47f2a5f21cc2984f8e" - }, - "lifecycle": { - "Package": "lifecycle", - "Version": "1.0.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "cli", - "glue", - "rlang" - ], - "Hash": "001cecbeac1cff9301bdc3775ee46a86" - }, - "lubridate": { - "Package": "lubridate", - "Version": "1.9.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "generics", - "methods", - "timechange" - ], - "Hash": "e25f18436e3efd42c7c590a1c4c15390" - }, - "magrittr": { - "Package": "magrittr", - "Version": "2.0.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R" - ], - "Hash": "7ce2733a9826b3aeb1775d56fd305472" - }, - "memoise": { - "Package": "memoise", - "Version": "2.0.1", - "Source": 
"Repository", - "Repository": "CRAN", - "Requirements": [ - "cachem", - "rlang" - ], - "Hash": "e2817ccf4a065c5d9d7f2cfbe7c1d78c" - }, - "openssl": { - "Package": "openssl", - "Version": "2.1.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "askpass" - ], - "Hash": "0f7cd2962e3044bb940cca4f4b5cecbe" - }, - "pillar": { - "Package": "pillar", - "Version": "1.9.0", - "Source": "Repository", - "Repository": "RSPM", - "Requirements": [ - "cli", - "fansi", - "glue", - "lifecycle", - "rlang", - "utf8", - "utils", - "vctrs" - ], - "Hash": "15da5a8412f317beeee6175fbc76f4bb" - }, - "pkgconfig": { - "Package": "pkgconfig", - "Version": "2.0.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "utils" - ], - "Hash": "01f28d4278f15c76cddbea05899c5d6f" - }, - "plogr": { - "Package": "plogr", - "Version": "0.2.0", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "09eb987710984fc2905c7129c7d85e65" - }, - "pool": { - "Package": "pool", - "Version": "1.0.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "DBI", - "R", - "R6", - "later", - "methods", - "rlang", - "withr" - ], - "Hash": "52d086ff1a2ccccbae6d462cb0773835" - }, - "prettyunits": { - "Package": "prettyunits", - "Version": "1.2.0", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "95ef9167b75dde9d2ccc3c7528393e7e" - }, - "progress": { - "Package": "progress", - "Version": "1.2.2", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R6", - "crayon", - "hms", - "prettyunits" - ], - "Hash": "14dc9f7a3c91ebb14ec5bb9208a07061" - }, - "purrr": { - "Package": "purrr", - "Version": "1.0.2", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "cli", - "lifecycle", - "magrittr", - "rlang", - "vctrs" - ], - "Hash": "d71c815267c640f17ddbf7f16144b4bb" - }, - "rJava": { - "Package": "rJava", - "Version": "1.0-6", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - 
"methods" - ], - "Hash": "0415819f6baa75d86d52483f7292b623" - }, - "rappdirs": { - "Package": "rappdirs", - "Version": "0.3.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R" - ], - "Hash": "5e3c5dc0b071b21fa128676560dbe94d" - }, - "readr": { - "Package": "readr", - "Version": "2.1.4", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "R6", - "cli", - "clipr", - "cpp11", - "crayon", - "hms", - "lifecycle", - "methods", - "rlang", - "tibble", - "tzdb", - "utils", - "vroom" - ], - "Hash": "b5047343b3825f37ad9d3b5d89aa1078" - }, - "renv": { - "Package": "renv", - "Version": "1.0.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "utils" - ], - "Hash": "41b847654f567341725473431dd0d5ab" - }, - "rlang": { - "Package": "rlang", - "Version": "1.1.1", - "Source": "Repository", - "Repository": "RSPM", - "Requirements": [ - "R", - "utils" - ], - "Hash": "dc079ccd156cde8647360f473c1fa718" - }, - "snow": { - "Package": "snow", - "Version": "0.4-4", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "utils" - ], - "Hash": "40b74690debd20c57d93d8c246b305d4" - }, - "sodium": { - "Package": "sodium", - "Version": "1.3.0", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "3606bb09e0914edd4fc8313b500dcd5e" - }, - "stringi": { - "Package": "stringi", - "Version": "1.7.12", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "stats", - "tools", - "utils" - ], - "Hash": "ca8bd84263c77310739d2cf64d84d7c9" - }, - "stringr": { - "Package": "stringr", - "Version": "1.5.0", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "cli", - "glue", - "lifecycle", - "magrittr", - "rlang", - "stringi", - "vctrs" - ], - "Hash": "671a4d384ae9d32fc47a14e98bfa3dc8" - }, - "sys": { - "Package": "sys", - "Version": "3.4.2", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "34c16f1ef796057bfa06d3f4ff818a5d" - }, - 
"tibble": { - "Package": "tibble", - "Version": "3.2.1", - "Source": "Repository", - "Repository": "RSPM", - "Requirements": [ - "R", - "fansi", - "lifecycle", - "magrittr", - "methods", - "pillar", - "pkgconfig", - "rlang", - "utils", - "vctrs" - ], - "Hash": "a84e2cc86d07289b3b6f5069df7a004c" - }, - "tidyr": { - "Package": "tidyr", - "Version": "1.3.0", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "cli", - "cpp11", - "dplyr", - "glue", - "lifecycle", - "magrittr", - "purrr", - "rlang", - "stringr", - "tibble", - "tidyselect", - "utils", - "vctrs" - ], - "Hash": "e47debdc7ce599b070c8e78e8ac0cfcf" - }, - "tidyselect": { - "Package": "tidyselect", - "Version": "1.2.0", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "cli", - "glue", - "lifecycle", - "rlang", - "vctrs", - "withr" - ], - "Hash": "79540e5fcd9e0435af547d885f184fd5" - }, - "timechange": { - "Package": "timechange", - "Version": "0.2.0", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "cpp11" - ], - "Hash": "8548b44f79a35ba1791308b61e6012d7" - }, - "triebeard": { - "Package": "triebeard", - "Version": "0.4.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "Rcpp" - ], - "Hash": "642507a148b0dd9b5620177e0a044413" - }, - "tzdb": { - "Package": "tzdb", - "Version": "0.4.0", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "cpp11" - ], - "Hash": "b2e1cbce7c903eaf23ec05c58e59fb5e" - }, - "urltools": { - "Package": "urltools", - "Version": "1.7.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "Rcpp", - "methods", - "triebeard" - ], - "Hash": "e86a704261a105f4703f653e05defa3e" - }, - "utf8": { - "Package": "utf8", - "Version": "1.2.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R" - ], - "Hash": "1fe17157424bb09c48a8b3b550c753bc" - }, - "vctrs": { - "Package": "vctrs", - "Version": "0.6.3", - 
"Source": "Repository", - "Repository": "RSPM", - "Requirements": [ - "R", - "cli", - "glue", - "lifecycle", - "rlang" - ], - "Hash": "06eceb3a5d716fd0654cc23ca3d71a99" - }, - "vroom": { - "Package": "vroom", - "Version": "1.6.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "bit64", - "cli", - "cpp11", - "crayon", - "glue", - "hms", - "lifecycle", - "methods", - "progress", - "rlang", - "stats", - "tibble", - "tidyselect", - "tzdb", - "vctrs", - "withr" - ], - "Hash": "7015a74373b83ffaef64023f4a0f5033" - }, - "withr": { - "Package": "withr", - "Version": "2.5.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "grDevices", - "graphics", - "stats" - ], - "Hash": "c0e49a9760983e81e55cdd9be92e7182" - }, - "xml2": { - "Package": "xml2", - "Version": "1.3.5", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "methods" - ], - "Hash": "40682ed6a969ea5abfd351eb67833adc" - }, - "yaml": { - "Package": "yaml", - "Version": "2.3.7", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "0d0056cc5383fbc240ccd0cb584bf436" - }, - "zip": { - "Package": "zip", - "Version": "2.3.0", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "d98c94dacb7e0efcf83b0a133a705504" - } - } -} diff --git a/extras/TestModule1-0.0.1/renv/.gitignore b/extras/TestModule1-0.0.1/renv/.gitignore deleted file mode 100644 index 0ec0cbba..00000000 --- a/extras/TestModule1-0.0.1/renv/.gitignore +++ /dev/null @@ -1,7 +0,0 @@ -library/ -local/ -cellar/ -lock/ -python/ -sandbox/ -staging/ diff --git a/extras/TestModule1-0.0.1/renv/activate.R b/extras/TestModule1-0.0.1/renv/activate.R deleted file mode 100644 index cb5401f9..00000000 --- a/extras/TestModule1-0.0.1/renv/activate.R +++ /dev/null @@ -1,1180 +0,0 @@ - -local({ - - # the requested version of renv - version <- "1.0.3" - attr(version, "sha") <- NULL - - # the project directory - project <- getwd() - - # use start-up diagnostics if enabled - 
diagnostics <- Sys.getenv("RENV_STARTUP_DIAGNOSTICS", unset = "FALSE") - if (diagnostics) { - start <- Sys.time() - profile <- tempfile("renv-startup-", fileext = ".Rprof") - utils::Rprof(profile) - on.exit({ - utils::Rprof(NULL) - elapsed <- signif(difftime(Sys.time(), start, units = "auto"), digits = 2L) - writeLines(sprintf("- renv took %s to run the autoloader.", format(elapsed))) - writeLines(sprintf("- Profile: %s", profile)) - print(utils::summaryRprof(profile)) - }, add = TRUE) - } - - # figure out whether the autoloader is enabled - enabled <- local({ - - # first, check config option - override <- getOption("renv.config.autoloader.enabled") - if (!is.null(override)) - return(override) - - # next, check environment variables - # TODO: prefer using the configuration one in the future - envvars <- c( - "RENV_CONFIG_AUTOLOADER_ENABLED", - "RENV_AUTOLOADER_ENABLED", - "RENV_ACTIVATE_PROJECT" - ) - - for (envvar in envvars) { - envval <- Sys.getenv(envvar, unset = NA) - if (!is.na(envval)) - return(tolower(envval) %in% c("true", "t", "1")) - } - - # enable by default - TRUE - - }) - - if (!enabled) - return(FALSE) - - # avoid recursion - if (identical(getOption("renv.autoloader.running"), TRUE)) { - warning("ignoring recursive attempt to run renv autoloader") - return(invisible(TRUE)) - } - - # signal that we're loading renv during R startup - options(renv.autoloader.running = TRUE) - on.exit(options(renv.autoloader.running = NULL), add = TRUE) - - # signal that we've consented to use renv - options(renv.consent = TRUE) - - # load the 'utils' package eagerly -- this ensures that renv shims, which - # mask 'utils' packages, will come first on the search path - library(utils, lib.loc = .Library) - - # unload renv if it's already been loaded - if ("renv" %in% loadedNamespaces()) - unloadNamespace("renv") - - # load bootstrap tools - `%||%` <- function(x, y) { - if (is.null(x)) y else x - } - - catf <- function(fmt, ..., appendLF = TRUE) { - - quiet <- 
getOption("renv.bootstrap.quiet", default = FALSE) - if (quiet) - return(invisible()) - - msg <- sprintf(fmt, ...) - cat(msg, file = stdout(), sep = if (appendLF) "\n" else "") - - invisible(msg) - - } - - header <- function(label, - ..., - prefix = "#", - suffix = "-", - n = min(getOption("width"), 78)) - { - label <- sprintf(label, ...) - n <- max(n - nchar(label) - nchar(prefix) - 2L, 8L) - if (n <= 0) - return(paste(prefix, label)) - - tail <- paste(rep.int(suffix, n), collapse = "") - paste0(prefix, " ", label, " ", tail) - - } - - startswith <- function(string, prefix) { - substring(string, 1, nchar(prefix)) == prefix - } - - bootstrap <- function(version, library) { - - friendly <- renv_bootstrap_version_friendly(version) - section <- header(sprintf("Bootstrapping renv %s", friendly)) - catf(section) - - # attempt to download renv - catf("- Downloading renv ... ", appendLF = FALSE) - withCallingHandlers( - tarball <- renv_bootstrap_download(version), - error = function(err) { - catf("FAILED") - stop("failed to download:\n", conditionMessage(err)) - } - ) - catf("OK") - on.exit(unlink(tarball), add = TRUE) - - # now attempt to install - catf("- Installing renv ... 
", appendLF = FALSE) - withCallingHandlers( - status <- renv_bootstrap_install(version, tarball, library), - error = function(err) { - catf("FAILED") - stop("failed to install:\n", conditionMessage(err)) - } - ) - catf("OK") - - # add empty line to break up bootstrapping from normal output - catf("") - - return(invisible()) - } - - renv_bootstrap_tests_running <- function() { - getOption("renv.tests.running", default = FALSE) - } - - renv_bootstrap_repos <- function() { - - # get CRAN repository - cran <- getOption("renv.repos.cran", "https://cloud.r-project.org") - - # check for repos override - repos <- Sys.getenv("RENV_CONFIG_REPOS_OVERRIDE", unset = NA) - if (!is.na(repos)) { - - # check for RSPM; if set, use a fallback repository for renv - rspm <- Sys.getenv("RSPM", unset = NA) - if (identical(rspm, repos)) - repos <- c(RSPM = rspm, CRAN = cran) - - return(repos) - - } - - # check for lockfile repositories - repos <- tryCatch(renv_bootstrap_repos_lockfile(), error = identity) - if (!inherits(repos, "error") && length(repos)) - return(repos) - - # retrieve current repos - repos <- getOption("repos") - - # ensure @CRAN@ entries are resolved - repos[repos == "@CRAN@"] <- cran - - # add in renv.bootstrap.repos if set - default <- c(FALLBACK = "https://cloud.r-project.org") - extra <- getOption("renv.bootstrap.repos", default = default) - repos <- c(repos, extra) - - # remove duplicates that might've snuck in - dupes <- duplicated(repos) | duplicated(names(repos)) - repos[!dupes] - - } - - renv_bootstrap_repos_lockfile <- function() { - - lockpath <- Sys.getenv("RENV_PATHS_LOCKFILE", unset = "renv.lock") - if (!file.exists(lockpath)) - return(NULL) - - lockfile <- tryCatch(renv_json_read(lockpath), error = identity) - if (inherits(lockfile, "error")) { - warning(lockfile) - return(NULL) - } - - repos <- lockfile$R$Repositories - if (length(repos) == 0) - return(NULL) - - keys <- vapply(repos, `[[`, "Name", FUN.VALUE = character(1)) - vals <- vapply(repos, `[[`, 
"URL", FUN.VALUE = character(1)) - names(vals) <- keys - - return(vals) - - } - - renv_bootstrap_download <- function(version) { - - sha <- attr(version, "sha", exact = TRUE) - - methods <- if (!is.null(sha)) { - - # attempting to bootstrap a development version of renv - c( - function() renv_bootstrap_download_tarball(sha), - function() renv_bootstrap_download_github(sha) - ) - - } else { - - # attempting to bootstrap a release version of renv - c( - function() renv_bootstrap_download_tarball(version), - function() renv_bootstrap_download_cran_latest(version), - function() renv_bootstrap_download_cran_archive(version) - ) - - } - - for (method in methods) { - path <- tryCatch(method(), error = identity) - if (is.character(path) && file.exists(path)) - return(path) - } - - stop("All download methods failed") - - } - - renv_bootstrap_download_impl <- function(url, destfile) { - - mode <- "wb" - - # https://bugs.r-project.org/bugzilla/show_bug.cgi?id=17715 - fixup <- - Sys.info()[["sysname"]] == "Windows" && - substring(url, 1L, 5L) == "file:" - - if (fixup) - mode <- "w+b" - - args <- list( - url = url, - destfile = destfile, - mode = mode, - quiet = TRUE - ) - - if ("headers" %in% names(formals(utils::download.file))) - args$headers <- renv_bootstrap_download_custom_headers(url) - - do.call(utils::download.file, args) - - } - - renv_bootstrap_download_custom_headers <- function(url) { - - headers <- getOption("renv.download.headers") - if (is.null(headers)) - return(character()) - - if (!is.function(headers)) - stopf("'renv.download.headers' is not a function") - - headers <- headers(url) - if (length(headers) == 0L) - return(character()) - - if (is.list(headers)) - headers <- unlist(headers, recursive = FALSE, use.names = TRUE) - - ok <- - is.character(headers) && - is.character(names(headers)) && - all(nzchar(names(headers))) - - if (!ok) - stop("invocation of 'renv.download.headers' did not return a named character vector") - - headers - - } - - 
renv_bootstrap_download_cran_latest <- function(version) { - - spec <- renv_bootstrap_download_cran_latest_find(version) - type <- spec$type - repos <- spec$repos - - baseurl <- utils::contrib.url(repos = repos, type = type) - ext <- if (identical(type, "source")) - ".tar.gz" - else if (Sys.info()[["sysname"]] == "Windows") - ".zip" - else - ".tgz" - name <- sprintf("renv_%s%s", version, ext) - url <- paste(baseurl, name, sep = "/") - - destfile <- file.path(tempdir(), name) - status <- tryCatch( - renv_bootstrap_download_impl(url, destfile), - condition = identity - ) - - if (inherits(status, "condition")) - return(FALSE) - - # report success and return - destfile - - } - - renv_bootstrap_download_cran_latest_find <- function(version) { - - # check whether binaries are supported on this system - binary <- - getOption("renv.bootstrap.binary", default = TRUE) && - !identical(.Platform$pkgType, "source") && - !identical(getOption("pkgType"), "source") && - Sys.info()[["sysname"]] %in% c("Darwin", "Windows") - - types <- c(if (binary) "binary", "source") - - # iterate over types + repositories - for (type in types) { - for (repos in renv_bootstrap_repos()) { - - # retrieve package database - db <- tryCatch( - as.data.frame( - utils::available.packages(type = type, repos = repos), - stringsAsFactors = FALSE - ), - error = identity - ) - - if (inherits(db, "error")) - next - - # check for compatible entry - entry <- db[db$Package %in% "renv" & db$Version %in% version, ] - if (nrow(entry) == 0) - next - - # found it; return spec to caller - spec <- list(entry = entry, type = type, repos = repos) - return(spec) - - } - } - - # if we got here, we failed to find renv - fmt <- "renv %s is not available from your declared package repositories" - stop(sprintf(fmt, version)) - - } - - renv_bootstrap_download_cran_archive <- function(version) { - - name <- sprintf("renv_%s.tar.gz", version) - repos <- renv_bootstrap_repos() - urls <- file.path(repos, "src/contrib/Archive/renv", 
name) - destfile <- file.path(tempdir(), name) - - for (url in urls) { - - status <- tryCatch( - renv_bootstrap_download_impl(url, destfile), - condition = identity - ) - - if (identical(status, 0L)) - return(destfile) - - } - - return(FALSE) - - } - - renv_bootstrap_download_tarball <- function(version) { - - # if the user has provided the path to a tarball via - # an environment variable, then use it - tarball <- Sys.getenv("RENV_BOOTSTRAP_TARBALL", unset = NA) - if (is.na(tarball)) - return() - - # allow directories - if (dir.exists(tarball)) { - name <- sprintf("renv_%s.tar.gz", version) - tarball <- file.path(tarball, name) - } - - # bail if it doesn't exist - if (!file.exists(tarball)) { - - # let the user know we weren't able to honour their request - fmt <- "- RENV_BOOTSTRAP_TARBALL is set (%s) but does not exist." - msg <- sprintf(fmt, tarball) - warning(msg) - - # bail - return() - - } - - catf("- Using local tarball '%s'.", tarball) - tarball - - } - - renv_bootstrap_download_github <- function(version) { - - enabled <- Sys.getenv("RENV_BOOTSTRAP_FROM_GITHUB", unset = "TRUE") - if (!identical(enabled, "TRUE")) - return(FALSE) - - # prepare download options - pat <- Sys.getenv("GITHUB_PAT") - if (nzchar(Sys.which("curl")) && nzchar(pat)) { - fmt <- "--location --fail --header \"Authorization: token %s\"" - extra <- sprintf(fmt, pat) - saved <- options("download.file.method", "download.file.extra") - options(download.file.method = "curl", download.file.extra = extra) - on.exit(do.call(base::options, saved), add = TRUE) - } else if (nzchar(Sys.which("wget")) && nzchar(pat)) { - fmt <- "--header=\"Authorization: token %s\"" - extra <- sprintf(fmt, pat) - saved <- options("download.file.method", "download.file.extra") - options(download.file.method = "wget", download.file.extra = extra) - on.exit(do.call(base::options, saved), add = TRUE) - } - - url <- file.path("https://api.github.com/repos/rstudio/renv/tarball", version) - name <- sprintf("renv_%s.tar.gz", 
version) - destfile <- file.path(tempdir(), name) - - status <- tryCatch( - renv_bootstrap_download_impl(url, destfile), - condition = identity - ) - - if (!identical(status, 0L)) - return(FALSE) - - renv_bootstrap_download_augment(destfile) - - return(destfile) - - } - - # Add Sha to DESCRIPTION. This is stop gap until #890, after which we - # can use renv::install() to fully capture metadata. - renv_bootstrap_download_augment <- function(destfile) { - sha <- renv_bootstrap_git_extract_sha1_tar(destfile) - if (is.null(sha)) { - return() - } - - # Untar - tempdir <- tempfile("renv-github-") - on.exit(unlink(tempdir, recursive = TRUE), add = TRUE) - untar(destfile, exdir = tempdir) - pkgdir <- dir(tempdir, full.names = TRUE)[[1]] - - # Modify description - desc_path <- file.path(pkgdir, "DESCRIPTION") - desc_lines <- readLines(desc_path) - remotes_fields <- c( - "RemoteType: github", - "RemoteHost: api.github.com", - "RemoteRepo: renv", - "RemoteUsername: rstudio", - "RemotePkgRef: rstudio/renv", - paste("RemoteRef: ", sha), - paste("RemoteSha: ", sha) - ) - writeLines(c(desc_lines[desc_lines != ""], remotes_fields), con = desc_path) - - # Re-tar - local({ - old <- setwd(tempdir) - on.exit(setwd(old), add = TRUE) - - tar(destfile, compression = "gzip") - }) - invisible() - } - - # Extract the commit hash from a git archive. Git archives include the SHA1 - # hash as the comment field of the tarball pax extended header - # (see https://www.kernel.org/pub/software/scm/git/docs/git-archive.html) - # For GitHub archives this should be the first header after the default one - # (512 byte) header. 
- renv_bootstrap_git_extract_sha1_tar <- function(bundle) { - - # open the bundle for reading - # We use gzcon for everything because (from ?gzcon) - # > Reading from a connection which does not supply a 'gzip' magic - # > header is equivalent to reading from the original connection - conn <- gzcon(file(bundle, open = "rb", raw = TRUE)) - on.exit(close(conn)) - - # The default pax header is 512 bytes long and the first pax extended header - # with the comment should be 51 bytes long - # `52 comment=` (11 chars) + 40 byte SHA1 hash - len <- 0x200 + 0x33 - res <- rawToChar(readBin(conn, "raw", n = len)[0x201:len]) - - if (grepl("^52 comment=", res)) { - sub("52 comment=", "", res) - } else { - NULL - } - } - - renv_bootstrap_install <- function(version, tarball, library) { - - # attempt to install it into project library - dir.create(library, showWarnings = FALSE, recursive = TRUE) - output <- renv_bootstrap_install_impl(library, tarball) - - # check for successful install - status <- attr(output, "status") - if (is.null(status) || identical(status, 0L)) - return(status) - - # an error occurred; report it - header <- "installation of renv failed" - lines <- paste(rep.int("=", nchar(header)), collapse = "") - text <- paste(c(header, lines, output), collapse = "\n") - stop(text) - - } - - renv_bootstrap_install_impl <- function(library, tarball) { - - # invoke using system2 so we can capture and report output - bin <- R.home("bin") - exe <- if (Sys.info()[["sysname"]] == "Windows") "R.exe" else "R" - R <- file.path(bin, exe) - - args <- c( - "--vanilla", "CMD", "INSTALL", "--no-multiarch", - "-l", shQuote(path.expand(library)), - shQuote(path.expand(tarball)) - ) - - system2(R, args, stdout = TRUE, stderr = TRUE) - - } - - renv_bootstrap_platform_prefix <- function() { - - # construct version prefix - version <- paste(R.version$major, R.version$minor, sep = ".") - prefix <- paste("R", numeric_version(version)[1, 1:2], sep = "-") - - # include SVN revision for 
development versions of R - # (to avoid sharing platform-specific artefacts with released versions of R) - devel <- - identical(R.version[["status"]], "Under development (unstable)") || - identical(R.version[["nickname"]], "Unsuffered Consequences") - - if (devel) - prefix <- paste(prefix, R.version[["svn rev"]], sep = "-r") - - # build list of path components - components <- c(prefix, R.version$platform) - - # include prefix if provided by user - prefix <- renv_bootstrap_platform_prefix_impl() - if (!is.na(prefix) && nzchar(prefix)) - components <- c(prefix, components) - - # build prefix - paste(components, collapse = "/") - - } - - renv_bootstrap_platform_prefix_impl <- function() { - - # if an explicit prefix has been supplied, use it - prefix <- Sys.getenv("RENV_PATHS_PREFIX", unset = NA) - if (!is.na(prefix)) - return(prefix) - - # if the user has requested an automatic prefix, generate it - auto <- Sys.getenv("RENV_PATHS_PREFIX_AUTO", unset = NA) - if (auto %in% c("TRUE", "True", "true", "1")) - return(renv_bootstrap_platform_prefix_auto()) - - # empty string on failure - "" - - } - - renv_bootstrap_platform_prefix_auto <- function() { - - prefix <- tryCatch(renv_bootstrap_platform_os(), error = identity) - if (inherits(prefix, "error") || prefix %in% "unknown") { - - msg <- paste( - "failed to infer current operating system", - "please file a bug report at https://github.com/rstudio/renv/issues", - sep = "; " - ) - - warning(msg) - - } - - prefix - - } - - renv_bootstrap_platform_os <- function() { - - sysinfo <- Sys.info() - sysname <- sysinfo[["sysname"]] - - # handle Windows + macOS up front - if (sysname == "Windows") - return("windows") - else if (sysname == "Darwin") - return("macos") - - # check for os-release files - for (file in c("/etc/os-release", "/usr/lib/os-release")) - if (file.exists(file)) - return(renv_bootstrap_platform_os_via_os_release(file, sysinfo)) - - # check for redhat-release files - if (file.exists("/etc/redhat-release")) - 
return(renv_bootstrap_platform_os_via_redhat_release()) - - "unknown" - - } - - renv_bootstrap_platform_os_via_os_release <- function(file, sysinfo) { - - # read /etc/os-release - release <- utils::read.table( - file = file, - sep = "=", - quote = c("\"", "'"), - col.names = c("Key", "Value"), - comment.char = "#", - stringsAsFactors = FALSE - ) - - vars <- as.list(release$Value) - names(vars) <- release$Key - - # get os name - os <- tolower(sysinfo[["sysname"]]) - - # read id - id <- "unknown" - for (field in c("ID", "ID_LIKE")) { - if (field %in% names(vars) && nzchar(vars[[field]])) { - id <- vars[[field]] - break - } - } - - # read version - version <- "unknown" - for (field in c("UBUNTU_CODENAME", "VERSION_CODENAME", "VERSION_ID", "BUILD_ID")) { - if (field %in% names(vars) && nzchar(vars[[field]])) { - version <- vars[[field]] - break - } - } - - # join together - paste(c(os, id, version), collapse = "-") - - } - - renv_bootstrap_platform_os_via_redhat_release <- function() { - - # read /etc/redhat-release - contents <- readLines("/etc/redhat-release", warn = FALSE) - - # infer id - id <- if (grepl("centos", contents, ignore.case = TRUE)) - "centos" - else if (grepl("redhat", contents, ignore.case = TRUE)) - "redhat" - else - "unknown" - - # try to find a version component (very hacky) - version <- "unknown" - - parts <- strsplit(contents, "[[:space:]]")[[1L]] - for (part in parts) { - - nv <- tryCatch(numeric_version(part), error = identity) - if (inherits(nv, "error")) - next - - version <- nv[1, 1] - break - - } - - paste(c("linux", id, version), collapse = "-") - - } - - renv_bootstrap_library_root_name <- function(project) { - - # use project name as-is if requested - asis <- Sys.getenv("RENV_PATHS_LIBRARY_ROOT_ASIS", unset = "FALSE") - if (asis) - return(basename(project)) - - # otherwise, disambiguate based on project's path - id <- substring(renv_bootstrap_hash_text(project), 1L, 8L) - paste(basename(project), id, sep = "-") - - } - - 
renv_bootstrap_library_root <- function(project) { - - prefix <- renv_bootstrap_profile_prefix() - - path <- Sys.getenv("RENV_PATHS_LIBRARY", unset = NA) - if (!is.na(path)) - return(paste(c(path, prefix), collapse = "/")) - - path <- renv_bootstrap_library_root_impl(project) - if (!is.null(path)) { - name <- renv_bootstrap_library_root_name(project) - return(paste(c(path, prefix, name), collapse = "/")) - } - - renv_bootstrap_paths_renv("library", project = project) - - } - - renv_bootstrap_library_root_impl <- function(project) { - - root <- Sys.getenv("RENV_PATHS_LIBRARY_ROOT", unset = NA) - if (!is.na(root)) - return(root) - - type <- renv_bootstrap_project_type(project) - if (identical(type, "package")) { - userdir <- renv_bootstrap_user_dir() - return(file.path(userdir, "library")) - } - - } - - renv_bootstrap_validate_version <- function(version, description = NULL) { - - # resolve description file - # - # avoid passing lib.loc to `packageDescription()` below, since R will - # use the loaded version of the package by default anyhow. 
note that - # this function should only be called after 'renv' is loaded - # https://github.com/rstudio/renv/issues/1625 - description <- description %||% packageDescription("renv") - - # check whether requested version 'version' matches loaded version of renv - sha <- attr(version, "sha", exact = TRUE) - valid <- if (!is.null(sha)) - renv_bootstrap_validate_version_dev(sha, description) - else - renv_bootstrap_validate_version_release(version, description) - - if (valid) - return(TRUE) - - # the loaded version of renv doesn't match the requested version; - # give the user instructions on how to proceed - remote <- if (!is.null(description[["RemoteSha"]])) { - paste("rstudio/renv", description[["RemoteSha"]], sep = "@") - } else { - paste("renv", description[["Version"]], sep = "@") - } - - # display both loaded version + sha if available - friendly <- renv_bootstrap_version_friendly( - version = description[["Version"]], - sha = description[["RemoteSha"]] - ) - - fmt <- paste( - "renv %1$s was loaded from project library, but this project is configured to use renv %2$s.", - "- Use `renv::record(\"%3$s\")` to record renv %1$s in the lockfile.", - "- Use `renv::restore(packages = \"renv\")` to install renv %2$s into the project library.", - sep = "\n" - ) - catf(fmt, friendly, renv_bootstrap_version_friendly(version), remote) - - FALSE - - } - - renv_bootstrap_validate_version_dev <- function(version, description) { - expected <- description[["RemoteSha"]] - is.character(expected) && startswith(expected, version) - } - - renv_bootstrap_validate_version_release <- function(version, description) { - expected <- description[["Version"]] - is.character(expected) && identical(expected, version) - } - - renv_bootstrap_hash_text <- function(text) { - - hashfile <- tempfile("renv-hash-") - on.exit(unlink(hashfile), add = TRUE) - - writeLines(text, con = hashfile) - tools::md5sum(hashfile) - - } - - renv_bootstrap_load <- function(project, libpath, version) { - - # try to 
load renv from the project library - if (!requireNamespace("renv", lib.loc = libpath, quietly = TRUE)) - return(FALSE) - - # warn if the version of renv loaded does not match - renv_bootstrap_validate_version(version) - - # execute renv load hooks, if any - hooks <- getHook("renv::autoload") - for (hook in hooks) - if (is.function(hook)) - tryCatch(hook(), error = warnify) - - # load the project - renv::load(project) - - TRUE - - } - - renv_bootstrap_profile_load <- function(project) { - - # if RENV_PROFILE is already set, just use that - profile <- Sys.getenv("RENV_PROFILE", unset = NA) - if (!is.na(profile) && nzchar(profile)) - return(profile) - - # check for a profile file (nothing to do if it doesn't exist) - path <- renv_bootstrap_paths_renv("profile", profile = FALSE, project = project) - if (!file.exists(path)) - return(NULL) - - # read the profile, and set it if it exists - contents <- readLines(path, warn = FALSE) - if (length(contents) == 0L) - return(NULL) - - # set RENV_PROFILE - profile <- contents[[1L]] - if (!profile %in% c("", "default")) - Sys.setenv(RENV_PROFILE = profile) - - profile - - } - - renv_bootstrap_profile_prefix <- function() { - profile <- renv_bootstrap_profile_get() - if (!is.null(profile)) - return(file.path("profiles", profile, "renv")) - } - - renv_bootstrap_profile_get <- function() { - profile <- Sys.getenv("RENV_PROFILE", unset = "") - renv_bootstrap_profile_normalize(profile) - } - - renv_bootstrap_profile_set <- function(profile) { - profile <- renv_bootstrap_profile_normalize(profile) - if (is.null(profile)) - Sys.unsetenv("RENV_PROFILE") - else - Sys.setenv(RENV_PROFILE = profile) - } - - renv_bootstrap_profile_normalize <- function(profile) { - - if (is.null(profile) || profile %in% c("", "default")) - return(NULL) - - profile - - } - - renv_bootstrap_path_absolute <- function(path) { - - substr(path, 1L, 1L) %in% c("~", "/", "\\") || ( - substr(path, 1L, 1L) %in% c(letters, LETTERS) && - substr(path, 2L, 3L) %in% 
c(":/", ":\\") - ) - - } - - renv_bootstrap_paths_renv <- function(..., profile = TRUE, project = NULL) { - renv <- Sys.getenv("RENV_PATHS_RENV", unset = "renv") - root <- if (renv_bootstrap_path_absolute(renv)) NULL else project - prefix <- if (profile) renv_bootstrap_profile_prefix() - components <- c(root, renv, prefix, ...) - paste(components, collapse = "/") - } - - renv_bootstrap_project_type <- function(path) { - - descpath <- file.path(path, "DESCRIPTION") - if (!file.exists(descpath)) - return("unknown") - - desc <- tryCatch( - read.dcf(descpath, all = TRUE), - error = identity - ) - - if (inherits(desc, "error")) - return("unknown") - - type <- desc$Type - if (!is.null(type)) - return(tolower(type)) - - package <- desc$Package - if (!is.null(package)) - return("package") - - "unknown" - - } - - renv_bootstrap_user_dir <- function() { - dir <- renv_bootstrap_user_dir_impl() - path.expand(chartr("\\", "/", dir)) - } - - renv_bootstrap_user_dir_impl <- function() { - - # use local override if set - override <- getOption("renv.userdir.override") - if (!is.null(override)) - return(override) - - # use R_user_dir if available - tools <- asNamespace("tools") - if (is.function(tools$R_user_dir)) - return(tools$R_user_dir("renv", "cache")) - - # try using our own backfill for older versions of R - envvars <- c("R_USER_CACHE_DIR", "XDG_CACHE_HOME") - for (envvar in envvars) { - root <- Sys.getenv(envvar, unset = NA) - if (!is.na(root)) - return(file.path(root, "R/renv")) - } - - # use platform-specific default fallbacks - if (Sys.info()[["sysname"]] == "Windows") - file.path(Sys.getenv("LOCALAPPDATA"), "R/cache/R/renv") - else if (Sys.info()[["sysname"]] == "Darwin") - "~/Library/Caches/org.R-project.R/R/renv" - else - "~/.cache/R/renv" - - } - - renv_bootstrap_version_friendly <- function(version, shafmt = NULL, sha = NULL) { - sha <- sha %||% attr(version, "sha", exact = TRUE) - parts <- c(version, sprintf(shafmt %||% " [sha: %s]", substring(sha, 1L, 7L))) - 
paste(parts, collapse = "") - } - - renv_bootstrap_exec <- function(project, libpath, version) { - if (!renv_bootstrap_load(project, libpath, version)) - renv_bootstrap_run(version, libpath) - } - - renv_bootstrap_run <- function(version, libpath) { - - # perform bootstrap - bootstrap(version, libpath) - - # exit early if we're just testing bootstrap - if (!is.na(Sys.getenv("RENV_BOOTSTRAP_INSTALL_ONLY", unset = NA))) - return(TRUE) - - # try again to load - if (requireNamespace("renv", lib.loc = libpath, quietly = TRUE)) { - return(renv::load(project = getwd())) - } - - # failed to download or load renv; warn the user - msg <- c( - "Failed to find an renv installation: the project will not be loaded.", - "Use `renv::activate()` to re-initialize the project." - ) - - warning(paste(msg, collapse = "\n"), call. = FALSE) - - } - - renv_json_read <- function(file = NULL, text = NULL) { - - jlerr <- NULL - - # if jsonlite is loaded, use that instead - if ("jsonlite" %in% loadedNamespaces()) { - - json <- catch(renv_json_read_jsonlite(file, text)) - if (!inherits(json, "error")) - return(json) - - jlerr <- json - - } - - # otherwise, fall back to the default JSON reader - json <- catch(renv_json_read_default(file, text)) - if (!inherits(json, "error")) - return(json) - - # report an error - if (!is.null(jlerr)) - stop(jlerr) - else - stop(json) - - } - - renv_json_read_jsonlite <- function(file = NULL, text = NULL) { - text <- paste(text %||% read(file), collapse = "\n") - jsonlite::fromJSON(txt = text, simplifyVector = FALSE) - } - - renv_json_read_default <- function(file = NULL, text = NULL) { - - # find strings in the JSON - text <- paste(text %||% read(file), collapse = "\n") - pattern <- '["](?:(?:\\\\.)|(?:[^"\\\\]))*?["]' - locs <- gregexpr(pattern, text, perl = TRUE)[[1]] - - # if any are found, replace them with placeholders - replaced <- text - strings <- character() - replacements <- character() - - if (!identical(c(locs), -1L)) { - - # get the string values 
- starts <- locs - ends <- locs + attr(locs, "match.length") - 1L - strings <- substring(text, starts, ends) - - # only keep those requiring escaping - strings <- grep("[[\\]{}:]", strings, perl = TRUE, value = TRUE) - - # compute replacements - replacements <- sprintf('"\032%i\032"', seq_along(strings)) - - # replace the strings - mapply(function(string, replacement) { - replaced <<- sub(string, replacement, replaced, fixed = TRUE) - }, strings, replacements) - - } - - # transform the JSON into something the R parser understands - transformed <- replaced - transformed <- gsub("{}", "`names<-`(list(), character())", transformed, fixed = TRUE) - transformed <- gsub("[[{]", "list(", transformed, perl = TRUE) - transformed <- gsub("[]}]", ")", transformed, perl = TRUE) - transformed <- gsub(":", "=", transformed, fixed = TRUE) - text <- paste(transformed, collapse = "\n") - - # parse it - json <- parse(text = text, keep.source = FALSE, srcfile = NULL)[[1L]] - - # construct map between source strings, replaced strings - map <- as.character(parse(text = strings)) - names(map) <- as.character(parse(text = replacements)) - - # convert to list - map <- as.list(map) - - # remap strings in object - remapped <- renv_json_remap(json, map) - - # evaluate - eval(remapped, envir = baseenv()) - - } - - renv_json_remap <- function(json, map) { - - # fix names - if (!is.null(names(json))) { - lhs <- match(names(json), names(map), nomatch = 0L) - rhs <- match(names(map), names(json), nomatch = 0L) - names(json)[rhs] <- map[lhs] - } - - # fix values - if (is.character(json)) - return(map[[json]] %||% json) - - # handle true, false, null - if (is.name(json)) { - text <- as.character(json) - if (text == "true") - return(TRUE) - else if (text == "false") - return(FALSE) - else if (text == "null") - return(NULL) - } - - # recurse - if (is.recursive(json)) { - for (i in seq_along(json)) { - json[i] <- list(renv_json_remap(json[[i]], map)) - } - } - - json - - } - - # load the renv profile, 
if any - renv_bootstrap_profile_load(project) - - # construct path to library root - root <- renv_bootstrap_library_root(project) - - # construct library prefix for platform - prefix <- renv_bootstrap_platform_prefix() - - # construct full libpath - libpath <- file.path(root, prefix) - - # run bootstrap code - renv_bootstrap_exec(project, libpath, version) - - invisible() - -}) diff --git a/extras/TestModule1-0.0.1/renv/profiles/dev/renv.lock b/extras/TestModule1-0.0.1/renv/profiles/dev/renv.lock deleted file mode 100644 index aeb3539a..00000000 --- a/extras/TestModule1-0.0.1/renv/profiles/dev/renv.lock +++ /dev/null @@ -1,1093 +0,0 @@ -{ - "R": { - "Version": "4.2.3", - "Repositories": [ - { - "Name": "CRAN", - "URL": "https://packagemanager.posit.co/cran/latest" - } - ] - }, - "Packages": { - "CohortGenerator": { - "Package": "CohortGenerator", - "Version": "0.8.1", - "Source": "GitHub", - "RemoteType": "github", - "RemoteHost": "api.github.com", - "RemoteUsername": "OHDSI", - "RemoteRepo": "CohortGenerator", - "RemoteRef": "v0.8.1", - "RemoteSha": "78757f1b191a395cf9dcff0d5bbe2b9fa4aa163e" - }, - "DBI": { - "Package": "DBI", - "Version": "1.1.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "methods" - ], - "Hash": "b2866e62bab9378c3cc9476a1954226b" - }, - "DatabaseConnector": { - "Package": "DatabaseConnector", - "Version": "6.3.2", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "DBI", - "R", - "SqlRender", - "bit64", - "checkmate", - "dbplyr", - "digest", - "methods", - "rJava", - "readr", - "rlang", - "stringr", - "urltools", - "utils" - ], - "Hash": "1ef65614602c6534a6c666e872c3b647" - }, - "Eunomia": { - "Package": "Eunomia", - "Version": "1.0.2", - "Source": "GitHub", - "RemoteType": "github", - "RemoteHost": "api.github.com", - "RemoteUsername": "OHDSI", - "RemoteRepo": "Eunomia", - "RemoteRef": "v1.0.2", - "RemoteSha": "e330860e581bcb33896ef1dbac29549224e0990c" - }, - "ParallelLogger": { - 
"Package": "ParallelLogger", - "Version": "3.3.0", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "jsonlite", - "methods", - "snow", - "utils", - "xml2" - ], - "Hash": "8d893bed8c8bfe21217464dd3f9ec3e9" - }, - "R6": { - "Package": "R6", - "Version": "2.5.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R" - ], - "Hash": "470851b6d5d0ac559e9d01bb352b4021" - }, - "RJSONIO": { - "Package": "RJSONIO", - "Version": "1.3-1.8", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "methods" - ], - "Hash": "cd79d1874fb20217463451f8c310c526" - }, - "RSQLite": { - "Package": "RSQLite", - "Version": "2.3.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "DBI", - "R", - "bit64", - "blob", - "cpp11", - "memoise", - "methods", - "pkgconfig", - "plogr" - ], - "Hash": "207c90cd5438a1f596da2cd54c606fee" - }, - "Rcpp": { - "Package": "Rcpp", - "Version": "1.0.11", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "methods", - "utils" - ], - "Hash": "e749cae40fa9ef469b6050959517453c" - }, - "ResultModelManager": { - "Package": "ResultModelManager", - "Version": "0.5.6", - "Source": "GitHub", - "RemoteType": "github", - "RemoteHost": "api.github.com", - "RemoteUsername": "OHDSI", - "RemoteRepo": "ResultModelManager", - "RemoteRef": "v0.5.6", - "RemoteSha": "3033804e5af77b8b8dacda67c4d6853731e3641b" - }, - "SqlRender": { - "Package": "SqlRender", - "Version": "1.16.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "checkmate", - "rJava", - "rlang" - ], - "Hash": "94d9cae91bbd8aed211bea82aff7cf77" - }, - "askpass": { - "Package": "askpass", - "Version": "1.2.0", - "Source": "Repository", - "Repository": "CRAN" - }, - "assertthat": { - "Package": "assertthat", - "Version": "0.2.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "tools" - ], - "Hash": "50c838a310445e954bc13f26f26a6ecf" - }, - "backports": { - 
"Package": "backports", - "Version": "1.4.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R" - ], - "Hash": "c39fbec8a30d23e721980b8afb31984c" - }, - "bit": { - "Package": "bit", - "Version": "4.0.5", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R" - ], - "Hash": "d242abec29412ce988848d0294b208fd" - }, - "bit64": { - "Package": "bit64", - "Version": "4.0.5", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "bit", - "methods", - "stats", - "utils" - ], - "Hash": "9fe98599ca456d6552421db0d6772d8f" - }, - "blob": { - "Package": "blob", - "Version": "1.2.4", - "Source": "Repository", - "Repository": "RSPM", - "Requirements": [ - "methods", - "rlang", - "vctrs" - ], - "Hash": "40415719b5a479b87949f3aa0aee737c" - }, - "brio": { - "Package": "brio", - "Version": "1.1.3", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "976cf154dfb043c012d87cddd8bca363" - }, - "cachem": { - "Package": "cachem", - "Version": "1.0.8", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "fastmap", - "rlang" - ], - "Hash": "cda74447c42f529de601fe4d4050daef" - }, - "callr": { - "Package": "callr", - "Version": "3.7.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "R6", - "processx", - "utils" - ], - "Hash": "9b2191ede20fa29828139b9900922e51" - }, - "checkmate": { - "Package": "checkmate", - "Version": "2.3.0", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "backports", - "utils" - ], - "Hash": "ed4275b13c6ab74b89a31def0b6bf835" - }, - "cli": { - "Package": "cli", - "Version": "3.6.1", - "Source": "Repository", - "Repository": "RSPM", - "Requirements": [ - "R", - "utils" - ], - "Hash": "89e6d8219950eac806ae0c489052048a" - }, - "clipr": { - "Package": "clipr", - "Version": "0.8.0", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "utils" - ], - "Hash": "3f038e5ac7f41d4ac41ce658c85e3042" - }, 
- "cpp11": { - "Package": "cpp11", - "Version": "0.4.6", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "ed588261931ee3be2c700d22e94a29ab" - }, - "crayon": { - "Package": "crayon", - "Version": "1.5.2", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "grDevices", - "methods", - "utils" - ], - "Hash": "e8a1e41acf02548751f45c718d55aa6a" - }, - "dbplyr": { - "Package": "dbplyr", - "Version": "2.3.4", - "Source": "Repository", - "Repository": "RSPM", - "Requirements": [ - "DBI", - "R", - "R6", - "blob", - "cli", - "dplyr", - "glue", - "lifecycle", - "magrittr", - "methods", - "pillar", - "purrr", - "rlang", - "tibble", - "tidyr", - "tidyselect", - "utils", - "vctrs", - "withr" - ], - "Hash": "d24305b92db333726aed162a2c23a147" - }, - "desc": { - "Package": "desc", - "Version": "1.4.2", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "R6", - "cli", - "rprojroot", - "utils" - ], - "Hash": "6b9602c7ebbe87101a9c8edb6e8b6d21" - }, - "diffobj": { - "Package": "diffobj", - "Version": "0.3.5", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "crayon", - "methods", - "stats", - "tools", - "utils" - ], - "Hash": "bcaa8b95f8d7d01a5dedfd959ce88ab8" - }, - "digest": { - "Package": "digest", - "Version": "0.6.33", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "utils" - ], - "Hash": "8b708f296afd9ae69f450f9640be8990" - }, - "dplyr": { - "Package": "dplyr", - "Version": "1.1.3", - "Source": "Repository", - "Repository": "RSPM", - "Requirements": [ - "R", - "R6", - "cli", - "generics", - "glue", - "lifecycle", - "magrittr", - "methods", - "pillar", - "rlang", - "tibble", - "tidyselect", - "utils", - "vctrs" - ], - "Hash": "eb5742d256a0d9306d85ea68756d8187" - }, - "ellipsis": { - "Package": "ellipsis", - "Version": "0.3.2", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "rlang" - ], - "Hash": "bb0eec2fe32e88d9e2836c2f73ea2077" 
- }, - "evaluate": { - "Package": "evaluate", - "Version": "0.22", - "Source": "Repository", - "Repository": "CRAN" - }, - "fansi": { - "Package": "fansi", - "Version": "1.0.4", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "grDevices", - "utils" - ], - "Hash": "1d9e7ad3c8312a192dea7d3db0274fde" - }, - "fastmap": { - "Package": "fastmap", - "Version": "1.1.1", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "f7736a18de97dea803bde0a2daaafb27" - }, - "filelock": { - "Package": "filelock", - "Version": "1.0.2", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "38ec653c2613bed60052ba3787bd8a2c" - }, - "fs": { - "Package": "fs", - "Version": "1.6.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "methods" - ], - "Hash": "f4dcd23b67e33d851d2079f703e8b985" - }, - "generics": { - "Package": "generics", - "Version": "0.1.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "methods" - ], - "Hash": "15e9634c0fcd294799e9b2e929ed1b86" - }, - "glue": { - "Package": "glue", - "Version": "1.6.2", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "methods" - ], - "Hash": "4f2596dfb05dac67b9dc558e5c6fba2e" - }, - "hms": { - "Package": "hms", - "Version": "1.1.3", - "Source": "Repository", - "Repository": "RSPM", - "Requirements": [ - "lifecycle", - "methods", - "pkgconfig", - "rlang", - "vctrs" - ], - "Hash": "b59377caa7ed00fa41808342002138f9" - }, - "jsonlite": { - "Package": "jsonlite", - "Version": "1.8.7", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "methods" - ], - "Hash": "a4269a09a9b865579b2635c77e572374" - }, - "keyring": { - "Package": "keyring", - "Version": "1.3.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R6", - "askpass", - "assertthat", - "filelock", - "openssl", - "rappdirs", - "sodium", - "tools", - "utils", - "yaml" - ], - "Hash": 
"b7880ebefe188d62b099673bbc04afac" - }, - "later": { - "Package": "later", - "Version": "1.3.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "Rcpp", - "rlang" - ], - "Hash": "7e7b457d7766bc47f2a5f21cc2984f8e" - }, - "lifecycle": { - "Package": "lifecycle", - "Version": "1.0.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "cli", - "glue", - "rlang" - ], - "Hash": "001cecbeac1cff9301bdc3775ee46a86" - }, - "lubridate": { - "Package": "lubridate", - "Version": "1.9.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "generics", - "methods", - "timechange" - ], - "Hash": "e25f18436e3efd42c7c590a1c4c15390" - }, - "magrittr": { - "Package": "magrittr", - "Version": "2.0.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R" - ], - "Hash": "7ce2733a9826b3aeb1775d56fd305472" - }, - "memoise": { - "Package": "memoise", - "Version": "2.0.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "cachem", - "rlang" - ], - "Hash": "e2817ccf4a065c5d9d7f2cfbe7c1d78c" - }, - "openssl": { - "Package": "openssl", - "Version": "2.1.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "askpass" - ], - "Hash": "0f7cd2962e3044bb940cca4f4b5cecbe" - }, - "pillar": { - "Package": "pillar", - "Version": "1.9.0", - "Source": "Repository", - "Repository": "RSPM", - "Requirements": [ - "cli", - "fansi", - "glue", - "lifecycle", - "rlang", - "utf8", - "utils", - "vctrs" - ], - "Hash": "15da5a8412f317beeee6175fbc76f4bb" - }, - "pkgconfig": { - "Package": "pkgconfig", - "Version": "2.0.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "utils" - ], - "Hash": "01f28d4278f15c76cddbea05899c5d6f" - }, - "pkgload": { - "Package": "pkgload", - "Version": "1.3.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "cli", - "crayon", - "desc", - "fs", - "glue", - "methods", - "rlang", - 
"rprojroot", - "utils", - "withr" - ], - "Hash": "6b0c222c5071efe0f3baf3dae9aa40e2" - }, - "plogr": { - "Package": "plogr", - "Version": "0.2.0", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "09eb987710984fc2905c7129c7d85e65" - }, - "pool": { - "Package": "pool", - "Version": "1.0.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "DBI", - "R", - "R6", - "later", - "methods", - "rlang", - "withr" - ], - "Hash": "52d086ff1a2ccccbae6d462cb0773835" - }, - "praise": { - "Package": "praise", - "Version": "1.0.0", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "a555924add98c99d2f411e37e7d25e9f" - }, - "prettyunits": { - "Package": "prettyunits", - "Version": "1.2.0", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "95ef9167b75dde9d2ccc3c7528393e7e" - }, - "processx": { - "Package": "processx", - "Version": "3.8.2", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "R6", - "ps", - "utils" - ], - "Hash": "a33ee2d9bf07564efb888ad98410da84" - }, - "progress": { - "Package": "progress", - "Version": "1.2.2", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R6", - "crayon", - "hms", - "prettyunits" - ], - "Hash": "14dc9f7a3c91ebb14ec5bb9208a07061" - }, - "ps": { - "Package": "ps", - "Version": "1.7.5", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "utils" - ], - "Hash": "d88be14c6790aa6fd7b27a2079a45a85" - }, - "purrr": { - "Package": "purrr", - "Version": "1.0.2", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "cli", - "lifecycle", - "magrittr", - "rlang", - "vctrs" - ], - "Hash": "d71c815267c640f17ddbf7f16144b4bb" - }, - "rJava": { - "Package": "rJava", - "Version": "1.0-6", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "methods" - ], - "Hash": "0415819f6baa75d86d52483f7292b623" - }, - "rappdirs": { - "Package": "rappdirs", - "Version": "0.3.3", - "Source": 
"Repository", - "Repository": "CRAN", - "Requirements": [ - "R" - ], - "Hash": "5e3c5dc0b071b21fa128676560dbe94d" - }, - "readr": { - "Package": "readr", - "Version": "2.1.4", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "R6", - "cli", - "clipr", - "cpp11", - "crayon", - "hms", - "lifecycle", - "methods", - "rlang", - "tibble", - "tzdb", - "utils", - "vroom" - ], - "Hash": "b5047343b3825f37ad9d3b5d89aa1078" - }, - "rematch2": { - "Package": "rematch2", - "Version": "2.1.2", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "tibble" - ], - "Hash": "76c9e04c712a05848ae7a23d2f170a40" - }, - "renv": { - "Package": "renv", - "Version": "1.0.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "utils" - ], - "Hash": "41b847654f567341725473431dd0d5ab" - }, - "rlang": { - "Package": "rlang", - "Version": "1.1.1", - "Source": "Repository", - "Repository": "RSPM", - "Requirements": [ - "R", - "utils" - ], - "Hash": "dc079ccd156cde8647360f473c1fa718" - }, - "rprojroot": { - "Package": "rprojroot", - "Version": "2.0.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R" - ], - "Hash": "1de7ab598047a87bba48434ba35d497d" - }, - "snow": { - "Package": "snow", - "Version": "0.4-4", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "utils" - ], - "Hash": "40b74690debd20c57d93d8c246b305d4" - }, - "sodium": { - "Package": "sodium", - "Version": "1.3.0", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "3606bb09e0914edd4fc8313b500dcd5e" - }, - "stringi": { - "Package": "stringi", - "Version": "1.7.12", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "stats", - "tools", - "utils" - ], - "Hash": "ca8bd84263c77310739d2cf64d84d7c9" - }, - "stringr": { - "Package": "stringr", - "Version": "1.5.0", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "cli", - "glue", - "lifecycle", - 
"magrittr", - "rlang", - "stringi", - "vctrs" - ], - "Hash": "671a4d384ae9d32fc47a14e98bfa3dc8" - }, - "sys": { - "Package": "sys", - "Version": "3.4.2", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "34c16f1ef796057bfa06d3f4ff818a5d" - }, - "testthat": { - "Package": "testthat", - "Version": "3.1.10", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "R6", - "brio", - "callr", - "cli", - "desc", - "digest", - "ellipsis", - "evaluate", - "jsonlite", - "lifecycle", - "magrittr", - "methods", - "pkgload", - "praise", - "processx", - "ps", - "rlang", - "utils", - "waldo", - "withr" - ], - "Hash": "7eb5fd202a61d2fb78af5869b6c08998" - }, - "tibble": { - "Package": "tibble", - "Version": "3.2.1", - "Source": "Repository", - "Repository": "RSPM", - "Requirements": [ - "R", - "fansi", - "lifecycle", - "magrittr", - "methods", - "pillar", - "pkgconfig", - "rlang", - "utils", - "vctrs" - ], - "Hash": "a84e2cc86d07289b3b6f5069df7a004c" - }, - "tidyr": { - "Package": "tidyr", - "Version": "1.3.0", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "cli", - "cpp11", - "dplyr", - "glue", - "lifecycle", - "magrittr", - "purrr", - "rlang", - "stringr", - "tibble", - "tidyselect", - "utils", - "vctrs" - ], - "Hash": "e47debdc7ce599b070c8e78e8ac0cfcf" - }, - "tidyselect": { - "Package": "tidyselect", - "Version": "1.2.0", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "cli", - "glue", - "lifecycle", - "rlang", - "vctrs", - "withr" - ], - "Hash": "79540e5fcd9e0435af547d885f184fd5" - }, - "timechange": { - "Package": "timechange", - "Version": "0.2.0", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "cpp11" - ], - "Hash": "8548b44f79a35ba1791308b61e6012d7" - }, - "triebeard": { - "Package": "triebeard", - "Version": "0.4.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "Rcpp" - ], - "Hash": "642507a148b0dd9b5620177e0a044413" - 
}, - "tzdb": { - "Package": "tzdb", - "Version": "0.4.0", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "cpp11" - ], - "Hash": "b2e1cbce7c903eaf23ec05c58e59fb5e" - }, - "urltools": { - "Package": "urltools", - "Version": "1.7.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "Rcpp", - "methods", - "triebeard" - ], - "Hash": "e86a704261a105f4703f653e05defa3e" - }, - "utf8": { - "Package": "utf8", - "Version": "1.2.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R" - ], - "Hash": "1fe17157424bb09c48a8b3b550c753bc" - }, - "vctrs": { - "Package": "vctrs", - "Version": "0.6.3", - "Source": "Repository", - "Repository": "RSPM", - "Requirements": [ - "R", - "cli", - "glue", - "lifecycle", - "rlang" - ], - "Hash": "06eceb3a5d716fd0654cc23ca3d71a99" - }, - "vroom": { - "Package": "vroom", - "Version": "1.6.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "bit64", - "cli", - "cpp11", - "crayon", - "glue", - "hms", - "lifecycle", - "methods", - "progress", - "rlang", - "stats", - "tibble", - "tidyselect", - "tzdb", - "vctrs", - "withr" - ], - "Hash": "7015a74373b83ffaef64023f4a0f5033" - }, - "waldo": { - "Package": "waldo", - "Version": "0.5.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "cli", - "diffobj", - "fansi", - "glue", - "methods", - "rematch2", - "rlang", - "tibble" - ], - "Hash": "035fba89d0c86e2113120f93301b98ad" - }, - "withr": { - "Package": "withr", - "Version": "2.5.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "grDevices", - "graphics", - "stats" - ], - "Hash": "c0e49a9760983e81e55cdd9be92e7182" - }, - "xml2": { - "Package": "xml2", - "Version": "1.3.5", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "methods" - ], - "Hash": "40682ed6a969ea5abfd351eb67833adc" - }, - "yaml": { - "Package": "yaml", - "Version": "2.3.7", - "Source": 
"Repository", - "Repository": "CRAN", - "Hash": "0d0056cc5383fbc240ccd0cb584bf436" - }, - "zip": { - "Package": "zip", - "Version": "2.3.0", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "d98c94dacb7e0efcf83b0a133a705504" - } - } -} diff --git a/extras/TestModule1-0.0.1/renv/profiles/dev/renv/.gitignore b/extras/TestModule1-0.0.1/renv/profiles/dev/renv/.gitignore deleted file mode 100644 index 0ec0cbba..00000000 --- a/extras/TestModule1-0.0.1/renv/profiles/dev/renv/.gitignore +++ /dev/null @@ -1,7 +0,0 @@ -library/ -local/ -cellar/ -lock/ -python/ -sandbox/ -staging/ diff --git a/extras/TestModule1-0.0.1/renv/profiles/dev/renv/settings.json b/extras/TestModule1-0.0.1/renv/profiles/dev/renv/settings.json deleted file mode 100644 index ffdbb320..00000000 --- a/extras/TestModule1-0.0.1/renv/profiles/dev/renv/settings.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "bioconductor.version": null, - "external.libraries": [], - "ignored.packages": [], - "package.dependency.fields": [ - "Imports", - "Depends", - "LinkingTo" - ], - "ppm.enabled": null, - "ppm.ignored.urls": [], - "r.version": null, - "snapshot.type": "implicit", - "use.cache": true, - "vcs.ignore.cellar": true, - "vcs.ignore.library": true, - "vcs.ignore.local": true, - "vcs.manage.ignores": true -} diff --git a/extras/TestModule1-0.0.1/renv/settings.json b/extras/TestModule1-0.0.1/renv/settings.json deleted file mode 100644 index ffdbb320..00000000 --- a/extras/TestModule1-0.0.1/renv/settings.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "bioconductor.version": null, - "external.libraries": [], - "ignored.packages": [], - "package.dependency.fields": [ - "Imports", - "Depends", - "LinkingTo" - ], - "ppm.enabled": null, - "ppm.ignored.urls": [], - "r.version": null, - "snapshot.type": "implicit", - "use.cache": true, - "vcs.ignore.cellar": true, - "vcs.ignore.library": true, - "vcs.ignore.local": true, - "vcs.manage.ignores": true -} diff --git a/extras/TestModule1-0.0.1/resultsDataModelSpecification.csv 
b/extras/TestModule1-0.0.1/resultsDataModelSpecification.csv deleted file mode 100644 index 38b8c8e3..00000000 --- a/extras/TestModule1-0.0.1/resultsDataModelSpecification.csv +++ /dev/null @@ -1,3 +0,0 @@ -table_name,column_name,data_type,is_required,primary_key,min_cell_count,description -unit_test,codeset_id,bigint,Yes,Yes,No,The codeset id -unit_test,concept_id,bigint,Yes,Yes,No,The concept id diff --git a/extras/TestModule1-0.0.1/tests/test-eunomia.R b/extras/TestModule1-0.0.1/tests/test-eunomia.R deleted file mode 100644 index 852af8ca..00000000 --- a/extras/TestModule1-0.0.1/tests/test-eunomia.R +++ /dev/null @@ -1,44 +0,0 @@ -# Use this profile when testing -#Sys.setenv(RENV_PROFILE = "dev") -#renv::restore(prompt = FALSE) -library(testthat) -library(Eunomia) -connectionDetails <- getEunomiaConnectionDetails() - -workFolder <- tempfile("work") -dir.create(workFolder) -resultsfolder <- tempfile("results") -dir.create(resultsfolder) -jobContext <- readRDS("tests/testJobContext.rds") -jobContext$moduleExecutionSettings$workSubFolder <- workFolder -jobContext$moduleExecutionSettings$resultsSubFolder <- resultsfolder -jobContext$moduleExecutionSettings$connectionDetails <- connectionDetails -jobContext$moduleExecutionSettings$resultsConnectionDetails <- connectionDetails -jobContext$moduleExecutionSettings$resultsDatabaseSchema <- jobContext$moduleExecutionSettings$workDatabaseSchema - -test_that("Test createDataModelSchema", { - source("Main.R") - createDataModelSchema(jobContext) - - # Verify that the table(s) are created - connection <- DatabaseConnector::connect( - connectionDetails = jobContext$moduleExecutionSettings$resultsConnectionDetails - ) - on.exit(DatabaseConnector::disconnect(connection)) - tableList <- DatabaseConnector::getTableNames( - connection = connection - ) - resultsTablesCreated <- tableList[grep(getModuleInfo()$TablePrefix, tableList)] - expect_true(length(resultsTablesCreated) > 0) -}) - -test_that("Run module", { - source("Main.R") - 
execute(jobContext) - resultsFiles <- list.files(resultsfolder) - expect_true("tm1_unit_test.csv" %in% resultsFiles) -}) - -unlink(workFolder) -unlink(resultsfolder) -unlink(connectionDetails$server()) diff --git a/extras/TestModule1-0.0.1/tests/testJobContext.rds b/extras/TestModule1-0.0.1/tests/testJobContext.rds deleted file mode 100644 index 0a515f82..00000000 Binary files a/extras/TestModule1-0.0.1/tests/testJobContext.rds and /dev/null differ diff --git a/extras/TestModule1-0.0.1/tests/testScript.R b/extras/TestModule1-0.0.1/tests/testScript.R deleted file mode 100644 index 02d8c189..00000000 --- a/extras/TestModule1-0.0.1/tests/testScript.R +++ /dev/null @@ -1,10 +0,0 @@ -library(testthat) - -testFiles <- list.files(file.path("tests"), "test-.*\\.R", full.names = TRUE) -for (testFile in testFiles) { - message(sprintf("*** Running tests in '%s' ***", testFile)) - source(testFile) -} - - -# Note: testthat default structure does not work for non-packages: https://github.com/r-lib/testthat/issues/1490 diff --git a/extras/ValidateModuleLockFiles.R b/extras/ValidateModuleLockFiles.R deleted file mode 100644 index f0033b2a..00000000 --- a/extras/ValidateModuleLockFiles.R +++ /dev/null @@ -1,24 +0,0 @@ -rootDir <- "C:/git/OHDSI" -moduleList <- c( - "Characterization", - "CohortDiagnostics", - "CohortGenerator", - "CohortIncidence", - "CohortMethod", - "EvidenceSynthesis", - "PatientLevelPrediction", - "SelfControlledCaseSeries" -) - -for (i in seq_along(moduleList)) { - repoPath <- file.path(rootDir, paste0(moduleList[i], "Module")) - if (dir.exists(repoPath)) { - cat("Checking ", repoPath, "\n") - cat(" -- Checking renv.lock file\n") - Strategus::validateLockFile(filename = file.path(repoPath, "renv.lock")) - cat(" -- Checking dev/renv.lock file\n") - Strategus::validateLockFile(filename = file.path(repoPath, "renv/profiles/dev/renv.lock")) - } else { - warning(paste0(repoPath, "NOT FOUND!!")) - } -} diff --git a/extras/testExecutionSettings.json 
b/extras/testExecutionSettings.json deleted file mode 100644 index fb1a9967..00000000 --- a/extras/testExecutionSettings.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "connectionDetailsReference": "prod-1-mdcd", - "workDatabaseSchema": "scratch_mschuemi", - "cdmDatabaseSchema": "cdm_truven_mdcd_v1978", - "cohortTableNames": { - "cohortTable": "strategus_test", - "cohortInclusionTable": "strategus_test_inclusion", - "cohortInclusionResultTable": "strategus_test_inclusion_result", - "cohortInclusionStatsTable": "strategus_test_inclusion_stats", - "cohortSummaryStatsTable": "strategus_test_summary_stats", - "cohortCensorStatsTable": "strategus_test_censor_stats" - }, - "workFolder": "c:/temp/strategusWork", - "resultsFolder": "c:/temp/strategusOutput", - "minCellCount": 5, - "attr_class": "ExecutionSettings" -} diff --git a/inst/csv/cohortIncidenceRdms.csv b/inst/csv/cohortIncidenceRdms.csv new file mode 100644 index 00000000..714c82b9 --- /dev/null +++ b/inst/csv/cohortIncidenceRdms.csv @@ -0,0 +1,48 @@ +table_name,column_name,data_type,is_required,primary_key +incidence_summary,ref_id,int,no,no +incidence_summary,database_id,varchar(255),yes,no +incidence_summary,source_name,varchar(255),no,no +incidence_summary,target_cohort_definition_id,bigint,no,no +incidence_summary,tar_id,bigint,no,no +incidence_summary,subgroup_id,bigint,no,no +incidence_summary,outcome_id,bigint,no,no +incidence_summary,age_group_id,int,no,no +incidence_summary,gender_id,int,no,no +incidence_summary,gender_name,varchar(255),no,no +incidence_summary,start_year,int,no,no +incidence_summary,persons_at_risk_pe,bigint,no,no +incidence_summary,persons_at_risk,bigint,no,no +incidence_summary,person_days_pe,bigint,no,no +incidence_summary,person_days,bigint,no,no +incidence_summary,person_outcomes_pe,bigint,no,no +incidence_summary,person_outcomes,bigint,no,no +incidence_summary,outcomes_pe,bigint,no,no +incidence_summary,outcomes,bigint,no,no +incidence_summary,incidence_proportion_p100p,float,no,no 
+incidence_summary,incidence_rate_p100py,float,no,no +target_def,ref_id,int,yes,yes +target_def,target_cohort_definition_id,bigint,yes,yes +target_def,target_name,varchar(255),no,no +outcome_def,ref_id,int,yes,yes +outcome_def,outcome_id,bigint,yes,yes +outcome_def,outcome_cohort_definition_id,bigint,no,no +outcome_def,outcome_name,varchar(255),no,no +outcome_def,clean_window,bigint,no,no +outcome_def,excluded_cohort_definition_id,bigint,no,no +tar_def,ref_id,int,yes,yes +tar_def,tar_id,bigint,yes,yes +tar_def,tar_start_with,varchar(10),no,no +tar_def,tar_start_offset,bigint,no,no +tar_def,tar_end_with,varchar(10),no,no +tar_def,tar_end_offset,bigint,no,no +age_group_def,ref_id,int,yes,yes +age_group_def,age_group_id,int,yes,yes +age_group_def,age_group_name,varchar(255),yes,no +age_group_def,min_age,int,no,no +age_group_def,max_age,int,no,no +subgroup_def,ref_id,int,yes,yes +subgroup_def,subgroup_id,bigint,no,yes +subgroup_def,subgroup_name,varchar(255),no,no +target_outcome_ref,ref_id,int,yes,yes +target_outcome_ref,target_cohort_id,bigint,yes,yes +target_outcome_ref,outcome_cohort_id,bigint,yes,yes diff --git a/inst/databaseMetaDataRdms.csv b/inst/csv/databaseMetaDataRdms.csv similarity index 100% rename from inst/databaseMetaDataRdms.csv rename to inst/csv/databaseMetaDataRdms.csv diff --git a/inst/csv/evidenceSynthesisRdms.csv b/inst/csv/evidenceSynthesisRdms.csv new file mode 100644 index 00000000..82e0f0d2 --- /dev/null +++ b/inst/csv/evidenceSynthesisRdms.csv @@ -0,0 +1,85 @@ +table_name,column_name,data_type,is_required,primary_key,min_cell_count,description +es_analysis,evidence_synthesis_analysis_id,int,Yes,Yes,No,A unique identifier for the evidence synthesis analysis. +es_analysis,evidence_synthesis_description,varchar(255),Yes,No,No,A description of the evidence synthesis analysis. +es_analysis,source_method,varchar(100),Yes,No,No,The method used to produce the source estimates (e.g. 'CohortMethod'). 
+es_analysis,definition,varchar,Yes,No,No,A JSON string representing the settings of the evidence synthesis analysis. +es_cm_diagnostics_summary,target_id,int,Yes,Yes,No,The identifier for the target cohort. +es_cm_diagnostics_summary,comparator_id,int,Yes,Yes,No,The identifier for the comparator cohort. +es_cm_diagnostics_summary,outcome_id,int,Yes,Yes,No,The identifier for the outcome cohort. +es_cm_diagnostics_summary,analysis_id,int,Yes,Yes,No,A unique identifier for the cohort method analysis. +es_cm_diagnostics_summary,evidence_synthesis_analysis_id,int,Yes,Yes,No,A foreign key referencing the es_analysis table. +es_cm_diagnostics_summary,mdrr,float,No,No,No,The minimum detectable relative risk. +es_cm_diagnostics_summary,i_2,float,No,No,No,The I2 statistics for heterogeneity. +es_cm_diagnostics_summary,tau,float,No,No,No,The estimated tau (standard deviation of the random-effects distribution). +es_cm_diagnostics_summary,ease,float,No,No,No,The expected absolute systematic error. +es_cm_diagnostics_summary,mdrr_diagnostic,varchar(13),Yes,No,No,PASS/ NOT EVALUATED / FAIL classification of the MDRR diagnostic. +es_cm_diagnostics_summary,i_2_diagnostic,varchar(13),Yes,No,No,PASS/ NOT EVALUATED / FAIL classification of the I2 diagnostic. +es_cm_diagnostics_summary,tau_diagnostic,varchar(13),Yes,No,No,PASS/ NOT EVALUATED / FAIL classification of the tau diagnostic. +es_cm_diagnostics_summary,ease_diagnostic,varchar(13),Yes,No,No,PASS/ NOT EVALUATED / FAIL classification of the EASE diagnostic. +es_cm_diagnostics_summary,unblind,int,Yes,No,No,"Is unblinding the result recommended? (1 = yes, 0 = no)" +es_cm_result,target_id,int,Yes,Yes,No,The identifier for the target cohort. +es_cm_result,comparator_id,int,Yes,Yes,No,The identifier for the comparator cohort. +es_cm_result,outcome_id,int,Yes,Yes,No,The identifier for the outcome cohort. +es_cm_result,analysis_id,int,Yes,Yes,No,A unique identifier for the cohort method analysis. 
+es_cm_result,evidence_synthesis_analysis_id,int,Yes,Yes,No,A foreign key referencing the es_analysis table. +es_cm_result,rr,float,No,No,No,The estimated relative risk (e.g. the hazard ratio). +es_cm_result,ci_95_lb,float,No,No,No,The lower bound of the 95% confidence interval of the relative risk. +es_cm_result,ci_95_ub,float,No,No,No,The upper bound of the 95% confidence interval of the relative risk. +es_cm_result,p,float,No,No,No,The two-sided p-value considering the null hypothesis of no effect. +es_cm_result,one_sided_p,float,No,No,No,The one-sided p-value considering the null hypothesis of RR <= 1. +es_cm_result,log_rr,float,No,No,No,The log of the relative risk. +es_cm_result,se_log_rr,float,No,No,No,The standard error of the log of the relative risk. +es_cm_result,target_subjects,int,No,No,Yes,The number of subjects in the target cohort. +es_cm_result,comparator_subjects,int,No,No,Yes,The number of subjects in the comparator cohort. +es_cm_result,target_days,int,No,No,No,The number of days observed in the target cohort. +es_cm_result,comparator_days,int,No,No,No,The number of days observed in the comparator cohort. +es_cm_result,target_outcomes,int,No,No,Yes,The number of outcomes observed in the target cohort. +es_cm_result,comparator_outcomes,int,No,No,Yes,The number of outcomes observed in the comparator cohort. +es_cm_result,n_databases,int,Yes,No,No,The number of databases that contributed to the meta-analytic estimate. +es_cm_result,calibrated_rr,float,No,No,No,The calibrated relative risk. +es_cm_result,calibrated_ci_95_lb,float,No,No,No,The lower bound of the calibrated 95% confidence interval of the relative risk. +es_cm_result,calibrated_ci_95_ub,float,No,No,No,The upper bound of the calibrated 95% confidence interval of the relative risk. +es_cm_result,calibrated_p,float,No,No,No,The calibrated two-sided p-value. +es_cm_result,calibrated_one_sided_p,float,No,No,No,The calibrated one-sided p-value considering the null hypothesis of RR <= 1. 
+es_cm_result,calibrated_log_rr,float,No,No,No,The log of the calibrated relative risk. +es_cm_result,calibrated_se_log_rr,float,No,No,No,The standard error of the log of the calibrated relative risk. +es_sccs_diagnostics_summary,exposures_outcome_set_id,int,Yes,Yes,No,A foreign key referencing the sccs_exposures_outcome_set table. +es_sccs_diagnostics_summary,covariate_id,int,Yes,Yes,No,A foreign key referencing the sccs_covariate table. The identifier for the covariate of interest. +es_sccs_diagnostics_summary,analysis_id,int,Yes,Yes,No,A unique identifier for the cohort method analysis. +es_sccs_diagnostics_summary,evidence_synthesis_analysis_id,int,Yes,Yes,No,A foreign key referencing the es_analysis table. +es_sccs_diagnostics_summary,mdrr,float,No,No,No,The minimum detectable relative risk. +es_sccs_diagnostics_summary,i_2,float,No,No,No,The I2 statistics for heterogeneity. +es_sccs_diagnostics_summary,tau,float,No,No,No,The estimated tau (standard deviation of the random-effects distribution). +es_sccs_diagnostics_summary,ease,float,No,No,No,The expected absolute systematic error. +es_sccs_diagnostics_summary,mdrr_diagnostic,varchar(13),Yes,No,No,PASS/ NOT EVALUATED / FAIL classification of the MDRR diagnostic. +es_sccs_diagnostics_summary,i_2_diagnostic,varchar(13),Yes,No,No,Pass / warning / fail classification of the I2 diagnostic. +es_sccs_diagnostics_summary,tau_diagnostic,varchar(13),Yes,No,No,Pass / warning / fail classification of the tau diagnostic. +es_sccs_diagnostics_summary,ease_diagnostic,varchar(13),Yes,No,No,Pass / warning / fail classification of the EASE diagnostic. +es_sccs_diagnostics_summary,unblind,int,Yes,No,No,"Is unblinding the result recommended? (1 = yes, 0 = no)" +es_sccs_result,analysis_id,int,Yes,Yes,No,A foreign key referencing the sccs_analysis table. +es_sccs_result,exposures_outcome_set_id,int,Yes,Yes,No,A foreign key referencing the sccs_exposures_outcome_set table. 
+es_sccs_result,covariate_id,int,Yes,Yes,No,A foreign key referencing the sccs_covariate table. The identifier for the covariate of interest. +es_sccs_result,evidence_synthesis_analysis_id,int,Yes,Yes,No,A foreign key referencing the es_analysis table. +es_sccs_result,rr,float,No,No,No,The estimated relative risk (i.e. the incidence rate ratio). +es_sccs_result,ci_95_lb,float,No,No,No,The lower bound of the 95% confidence interval of the relative risk. +es_sccs_result,ci_95_ub,float,No,No,No,The upper bound of the 95% confidence interval of the relative risk. +es_sccs_result,p,float,No,No,No,The two-sided p-value considering the null hypothesis of no effect. +es_sccs_result,one_sided_p,float,No,No,No,The one-sided p-value considering the null hypothesis of RR <= 1. +es_sccs_result,outcome_subjects,int,Yes,No,Yes,The number of subjects with at least one outcome. +es_sccs_result,outcome_events,int,Yes,No,Yes,The number of outcome events. +es_sccs_result,outcome_observation_periods,int,Yes,No,Yes,The number of observation periods containing at least one outcome. +es_sccs_result,covariate_subjects,int,Yes,No,Yes,The number of subjects having the covariate. +es_sccs_result,covariate_days,int,Yes,No,Yes,The total covariate time in days. +es_sccs_result,covariate_eras,int,Yes,No,Yes,The number of continuous eras of the covariate. +es_sccs_result,covariate_outcomes,int,Yes,No,Yes,The number of outcomes observed during the covariate time. +es_sccs_result,observed_days,int,Yes,No,Yes,The number of days subjects were observed. +es_sccs_result,n_databases,int,Yes,No,No,The number of databases that contributed to the meta-analytic estimate. +es_sccs_result,log_rr,float,No,No,No,The log of the relative risk. +es_sccs_result,se_log_rr,float,No,No,No,The standard error of the log of the relative risk. +es_sccs_result,calibrated_rr,float,No,No,No,The calibrated relative risk. 
+es_sccs_result,calibrated_ci_95_lb,float,No,No,No,The lower bound of the calibrated 95% confidence interval of the relative risk. +es_sccs_result,calibrated_ci_95_ub,float,No,No,No,The upper bound of the calibrated 95% confidence interval of the relative risk. +es_sccs_result,calibrated_p,float,No,No,No,The calibrated two-sided p-value. +es_sccs_result,calibrated_one_sided_p,float,No,No,No,The calibrated one-sided p-value considering the null hypothesis of RR <= 1. +es_sccs_result,calibrated_log_rr,float,No,No,No,The log of the calibrated relative risk. +es_sccs_result,calibrated_se_log_rr,float,No,No,No,The standard error of the log of the calibrated relative risk. diff --git a/inst/csv/modules.csv b/inst/csv/modules.csv deleted file mode 100644 index 7b43cd44..00000000 --- a/inst/csv/modules.csv +++ /dev/null @@ -1,9 +0,0 @@ -module,version,remote_repo,remote_username,module_type,main_package,main_package_tag -CharacterizationModule,v0.6.0,github.com,OHDSI,cdm,Characterization,v0.2.0 -CohortDiagnosticsModule,v0.2.0,github.com,OHDSI,cdm,CohortDiagnostics,v3.2.5 -CohortGeneratorModule,v0.4.1,github.com,OHDSI,cdm,CohortGenerator,v0.9.0 -CohortIncidenceModule,v0.4.1,github.com,OHDSI,cdm,CohortIncidence,v3.3.0 -CohortMethodModule,v0.3.1,github.com,OHDSI,cdm,CohortMethod,v5.3.0 -PatientLevelPredictionModule,v0.3.0,github.com,OHDSI,cdm,PatientLevelPrediction,v6.3.6 -SelfControlledCaseSeriesModule,v0.5.0,github.com,OHDSI,cdm,SelfControlledCaseSeries,v5.2.0 -EvidenceSynthesisModule,v0.6.1,github.com,OHDSI,results,EvidenceSynthesis,v0.5.0 diff --git a/inst/doc/CreatingAnalysisSpecification.pdf b/inst/doc/CreatingAnalysisSpecification.pdf deleted file mode 100644 index e3d7dd2a..00000000 Binary files a/inst/doc/CreatingAnalysisSpecification.pdf and /dev/null differ diff --git a/inst/doc/CreatingModules.pdf b/inst/doc/CreatingModules.pdf deleted file mode 100644 index edeb8a32..00000000 Binary files a/inst/doc/CreatingModules.pdf and /dev/null differ diff --git 
a/inst/doc/ExecuteStrategus.pdf b/inst/doc/ExecuteStrategus.pdf deleted file mode 100644 index 666c055a..00000000 Binary files a/inst/doc/ExecuteStrategus.pdf and /dev/null differ diff --git a/inst/testdata/TestModule1_0.0.1.zip b/inst/testdata/TestModule1_0.0.1.zip deleted file mode 100644 index e93236a0..00000000 Binary files a/inst/testdata/TestModule1_0.0.1.zip and /dev/null differ diff --git a/inst/testdata/analysisSpecification.json b/inst/testdata/cdmModulesAnalysisSpecifications.json similarity index 87% rename from inst/testdata/analysisSpecification.json rename to inst/testdata/cdmModulesAnalysisSpecifications.json index 9c4557d0..0867d091 100644 --- a/inst/testdata/analysisSpecification.json +++ b/inst/testdata/cdmModulesAnalysisSpecifications.json @@ -28,6 +28,36 @@ "cohortDefinition": "{\r\n\t\"cdmVersionRange\" : \">=5.0.0\",\r\n\t\"PrimaryCriteria\" : {\r\n\t\t\"CriteriaList\" : [\r\n\t\t\t{\r\n\t\t\t\t\"DrugEra\" : {\r\n\t\t\t\t\t\"CodesetId\" : 0\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t],\r\n\t\t\"ObservationWindow\" : {\r\n\t\t\t\"PriorDays\" : 0,\r\n\t\t\t\"PostDays\" : 0\r\n\t\t},\r\n\t\t\"PrimaryCriteriaLimit\" : {\r\n\t\t\t\"Type\" : \"First\"\r\n\t\t}\r\n\t},\r\n\t\"ConceptSets\" : [\r\n\t\t{\r\n\t\t\t\"id\" : 0,\r\n\t\t\t\"name\" : \"Diclofenac\",\r\n\t\t\t\"expression\" : {\r\n\t\t\t\t\"items\" : [\r\n\t\t\t\t\t{\r\n\t\t\t\t\t\t\"concept\" : {\r\n\t\t\t\t\t\t\t\"CONCEPT_ID\" : 1124300,\r\n\t\t\t\t\t\t\t\"CONCEPT_NAME\" : \"diclofenac\",\r\n\t\t\t\t\t\t\t\"STANDARD_CONCEPT\" : \"S\",\r\n\t\t\t\t\t\t\t\"STANDARD_CONCEPT_CAPTION\" : \"Standard\",\r\n\t\t\t\t\t\t\t\"INVALID_REASON\" : \"V\",\r\n\t\t\t\t\t\t\t\"INVALID_REASON_CAPTION\" : \"Valid\",\r\n\t\t\t\t\t\t\t\"CONCEPT_CODE\" : \"3355\",\r\n\t\t\t\t\t\t\t\"DOMAIN_ID\" : \"Drug\",\r\n\t\t\t\t\t\t\t\"VOCABULARY_ID\" : \"RxNorm\",\r\n\t\t\t\t\t\t\t\"CONCEPT_CLASS_ID\" : \"Ingredient\"\r\n\t\t\t\t\t\t},\r\n\t\t\t\t\t\t\"isExcluded\" : false,\r\n\t\t\t\t\t\t\"includeDescendants\" : 
false,\r\n\t\t\t\t\t\t\"includeMapped\" : false\r\n\t\t\t\t\t}\r\n\t\t\t\t]\r\n\t\t\t}\r\n\t\t}\r\n\t],\r\n\t\"QualifiedLimit\" : {\r\n\t\t\"Type\" : \"First\"\r\n\t},\r\n\t\"ExpressionLimit\" : {\r\n\t\t\"Type\" : \"First\"\r\n\t},\r\n\t\"InclusionRules\" : [\r\n\t\t{\r\n\t\t\t\"name\" : \"Age >= 30\",\r\n\t\t\t\"expression\" : {\r\n\t\t\t\t\"Type\" : \"ALL\",\r\n\t\t\t\t\"CriteriaList\" : [],\r\n\t\t\t\t\"DemographicCriteriaList\" : [\r\n\t\t\t\t\t{\r\n\t\t\t\t\t\t\"Age\" : {\r\n\t\t\t\t\t\t\t\"Value\" : 30,\r\n\t\t\t\t\t\t\t\"Op\" : \"gte\"\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t}\r\n\t\t\t\t],\r\n\t\t\t\t\"Groups\" : []\r\n\t\t\t}\r\n\t\t}\r\n\t],\r\n\t\"EndStrategy\" : {\r\n\t\t\"CustomEra\" : {\r\n\t\t\t\"DrugCodesetId\" : 0,\r\n\t\t\t\"GapDays\" : 30,\r\n\t\t\t\"Offset\" : 0\r\n\t\t}\r\n\t},\r\n\t\"CensoringCriteria\" : [],\r\n\t\"CollapseSettings\" : {\r\n\t\t\"CollapseType\" : \"ERA\",\r\n\t\t\"EraPad\" : 0\r\n\t},\r\n\t\"CensorWindow\" : {}\r\n}" } ], + "subsetDefs": [ + "{\n \"name\": \"test definition\",\n \"definitionId\": 1,\n \"subsetOperators\": [\n {\n \"name\": \"Demographic Criteria\",\n \"subsetType\": \"DemographicSubsetOperator\",\n \"ageMin\": 18,\n \"ageMax\": 64\n }\n ],\n \"packageVersion\": \"0.9.0\",\n \"identifierExpression\": \"targetId * 1000 + definitionId\",\n \"operatorNameConcatString\": \", \",\n \"subsetCohortNameTemplate\": \"@baseCohortName - @subsetDefinitionName @operatorNames\"\n}" + ], + "cohortSubsets": [ + { + "cohortId": 1001, + "subsetId": 1, + "targetCohortId": 1 + }, + { + "cohortId": 2001, + "subsetId": 1, + "targetCohortId": 2 + }, + { + "cohortId": 3001, + "subsetId": 1, + "targetCohortId": 3 + }, + { + "cohortId": 4001, + "subsetId": 1, + "targetCohortId": 4 + }, + { + "cohortId": 5001, + "subsetId": 1, + "targetCohortId": 5 + } + ], "attr_class": ["CohortDefinitionSharedResources", "SharedResources"] }, { @@ -214,161 +244,15 @@ "outcomeConceptId": " 197236" } ], - "occurrenceType": "all", + "occurrenceType": "first", 
"detectOnDescendants": true }, "attr_class": ["NegativeControlOutcomeSharedResources", "SharedResources"] } ], "moduleSpecifications": [ - { - "module": "CohortGeneratorModule", - "version": "0.3.0", - "remoteRepo": "github.com", - "remoteUsername": "ohdsi", - "settings": { - "incremental": true, - "generateStats": true - }, - "attr_class": ["CohortGeneratorModuleSpecifications", "ModuleSpecifications"] - }, - { - "module": "CohortDiagnosticsModule", - "version": "0.2.0", - "remoteRepo": "github.com", - "remoteUsername": "ohdsi", - "settings": { - "runInclusionStatistics": true, - "runIncludedSourceConcepts": true, - "runOrphanConcepts": true, - "runTimeSeries": false, - "runVisitContext": true, - "runBreakdownIndexEvents": true, - "runIncidenceRate": true, - "runCohortRelationship": true, - "runTemporalCohortCharacterization": true, - "temporalCovariateSettings": { - "temporal": true, - "temporalSequence": false, - "DemographicsGender": true, - "DemographicsAge": true, - "DemographicsAgeGroup": true, - "DemographicsRace": true, - "DemographicsEthnicity": true, - "DemographicsIndexYear": true, - "DemographicsIndexMonth": true, - "DemographicsPriorObservationTime": true, - "DemographicsPostObservationTime": true, - "DemographicsTimeInCohort": true, - "DemographicsIndexYearMonth": true, - "ConditionOccurrence": true, - "ConditionEraStart": true, - "ConditionEraOverlap": true, - "ConditionEraGroupOverlap": true, - "DrugEraStart": true, - "DrugEraGroupOverlap": true, - "ProcedureOccurrence": true, - "DeviceExposure": true, - "Measurement": true, - "Observation": true, - "CharlsonIndex": true, - "Dcsi": true, - "Chads2": true, - "Chads2Vasc": true, - "temporalStartDays": [-9999, -365, -180, -30, -365, -30, 0, 1, 31, -9999], - "temporalEndDays": [0, 0, 0, 0, -31, -1, 0, 30, 365, 9999], - "includedCovariateConceptIds": [], - "addDescendantsToInclude": false, - "excludedCovariateConceptIds": [], - "addDescendantsToExclude": false, - "includedCovariateIds": [], - 
"attr_class": "covariateSettings", - "attr_fun": "getDbDefaultCovariateData" - }, - "minCharacterizationMean": 0.01, - "irWashoutPeriod": 0, - "incremental": false - }, - "attr_class": ["CohortDiagnosticsModuleSpecifications", "ModuleSpecifications"] - }, - { - "module": "CohortIncidenceModule", - "version": "0.4.0", - "remoteRepo": "github.com", - "remoteUsername": "ohdsi", - "settings": { - "irDesign": { - "targetDefs": [ - { - "id": 1, - "name": "Celecoxib" - }, - { - "id": 2, - "name": "Diclofenac" - }, - { - "id": 4, - "name": "Celecoxib Age >= 30" - }, - { - "id": 5, - "name": "Diclofenac Age >= 30" - } - ], - "outcomeDefs": [ - { - "id": 1, - "name": "GI bleed", - "cohortId": 3, - "cleanWindow": 9999 - } - ], - "timeAtRiskDefs": [ - { - "id": 1, - "start": { - "dateField": "start", - "offset": 0 - }, - "end": { - "dateField": "end", - "offset": 0 - } - }, - { - "id": 2, - "start": { - "dateField": "start", - "offset": 0 - }, - "end": { - "dateField": "start", - "offset": 365 - } - } - ], - "analysisList": [ - { - "targets": [1, 2, 4, 5], - "outcomes": 1, - "tars": [1, 2] - } - ], - "strataSettings": { - "byAge": false, - "byGender": true, - "byYear": true - } - } - }, - "attr_class": ["CohortIncidenceModuleSpecifications", "ModuleSpecifications"] - }, { "module": "CharacterizationModule", - "version": "0.5.0", - "remoteRepo": "github.com", - "remoteUsername": "ohdsi", "settings": { "timeToEventSettings": [ { @@ -434,6 +318,7 @@ "attr_class": "covariateSettings", "attr_fun": "getDbDefaultCovariateData" }, + "minCharacterizationMean": 0, "attr_class": "aggregateCovariateSettings" }, { @@ -483,18 +368,80 @@ "attr_class": "covariateSettings", "attr_fun": "getDbDefaultCovariateData" }, + "minCharacterizationMean": 0, "attr_class": "aggregateCovariateSettings" } ], "attr_class": "characterizationSettings" }, - "attr_class": ["CharacterizationModuleSpecifications", "ModuleSpecifications"] + "attr_class": ["ModuleSpecifications", 
"CharacterizationModuleSpecifications"] + }, + { + "module": "CohortDiagnosticsModule", + "settings": { + "runInclusionStatistics": true, + "runIncludedSourceConcepts": true, + "runOrphanConcepts": true, + "runTimeSeries": false, + "runVisitContext": true, + "runBreakdownIndexEvents": true, + "runIncidenceRate": true, + "runCohortRelationship": true, + "runTemporalCohortCharacterization": true, + "temporalCovariateSettings": { + "temporal": true, + "temporalSequence": false, + "DemographicsGender": true, + "DemographicsAge": true, + "DemographicsAgeGroup": true, + "DemographicsRace": true, + "DemographicsEthnicity": true, + "DemographicsIndexYear": true, + "DemographicsIndexMonth": true, + "DemographicsPriorObservationTime": true, + "DemographicsPostObservationTime": true, + "DemographicsTimeInCohort": true, + "DemographicsIndexYearMonth": true, + "ConditionOccurrence": true, + "ConditionEraStart": true, + "ConditionEraOverlap": true, + "ConditionEraGroupOverlap": true, + "DrugEraStart": true, + "DrugEraGroupOverlap": true, + "ProcedureOccurrence": true, + "DeviceExposure": true, + "Measurement": true, + "Observation": true, + "CharlsonIndex": true, + "Dcsi": true, + "Chads2": true, + "Chads2Vasc": true, + "temporalStartDays": [-9999, -365, -180, -30, -365, -30, 0, 1, 31, -9999], + "temporalEndDays": [0, 0, 0, 0, -31, -1, 0, 30, 365, 9999], + "includedCovariateConceptIds": [], + "addDescendantsToInclude": false, + "excludedCovariateConceptIds": [], + "addDescendantsToExclude": false, + "includedCovariateIds": [], + "attr_class": "covariateSettings", + "attr_fun": "getDbDefaultCovariateData" + }, + "minCharacterizationMean": 0.01, + "irWashoutPeriod": 0, + "incremental": false + }, + "attr_class": ["ModuleSpecifications", "CohortDiagnosticsModuleSpecifications"] + }, + { + "module": "CohortGeneratorModule", + "settings": { + "incremental": true, + "generateStats": true + }, + "attr_class": ["ModuleSpecifications", "CohortGeneratorModuleSpecifications"] }, { 
"module": "CohortMethodModule", - "version": "0.3.0", - "remoteRepo": "github.com", - "remoteUsername": "ohdsi", "settings": { "cmAnalysisList": [ { @@ -603,6 +550,8 @@ "initialBound": 2, "maxBoundCount": 5, "algorithm": "ccd", + "doItAll": true, + "syncCV": false, "attr_class": "cyclopsControl" }, "attr_class": "args" @@ -713,6 +662,8 @@ "initialBound": 2, "maxBoundCount": 5, "algorithm": "ccd", + "doItAll": true, + "syncCV": false, "attr_class": "cyclopsControl" }, "estimator": "att", @@ -744,9 +695,8 @@ "covariateIds": [] }, "default": [], - "delim": "," - }, - "attr_problems": {} + "skip": 1 + } }, "attr_class": "args" }, @@ -788,6 +738,8 @@ "initialBound": 2, "maxBoundCount": 5, "algorithm": "ccd", + "doItAll": true, + "syncCV": false, "attr_class": "cyclopsControl" }, "attr_class": "args" @@ -1340,13 +1292,10 @@ "attr_class": "CmDiagnosticThresholds" } }, - "attr_class": ["CohortMethodModuleSpecifications", "ModuleSpecifications"] + "attr_class": ["ModuleSpecifications", "CohortMethodModuleSpecifications"] }, { "module": "SelfControlledCaseSeriesModule", - "version": "0.4.1", - "remoteRepo": "github.com", - "remoteUsername": "ohdsi", "settings": { "sccsAnalysisList": [ { @@ -1452,6 +1401,8 @@ "initialBound": 2, "maxBoundCount": 5, "algorithm": "ccd", + "doItAll": true, + "syncCV": false, "attr_class": "cyclopsControl" }, "profileBounds": [-2.3026, 2.3026], @@ -2359,290 +2310,289 @@ "attr_class": "SccsDiagnosticThresholds" } }, - "attr_class": ["SelfControlledCaseSeriesModuleSpecifications", "ModuleSpecifications"] + "attr_class": ["ModuleSpecifications", "SelfControlledCaseSeriesModuleSpecifications"] }, { "module": "PatientLevelPredictionModule", - "version": "0.3.0", - "remoteRepo": "github.com", - "remoteUsername": "ohdsi", - "settings": [ - { - "targetId": 1, - "outcomeId": 3, - "restrictPlpDataSettings": { - "studyStartDate": "", - "studyEndDate": "", - "firstExposureOnly": false, - "washoutPeriod": 0, - "sampleSize": null, - "attr_class": 
"restrictPlpDataSettings" - }, - "covariateSettings": { - "temporal": false, - "temporalSequence": false, - "DemographicsGender": true, - "DemographicsAgeGroup": true, - "DemographicsRace": true, - "DemographicsEthnicity": true, - "DemographicsIndexYear": true, - "DemographicsIndexMonth": true, - "ConditionGroupEraLongTerm": true, - "ConditionGroupEraShortTerm": true, - "DrugGroupEraLongTerm": true, - "DrugGroupEraShortTerm": true, - "DrugGroupEraOverlapping": true, - "ProcedureOccurrenceLongTerm": true, - "ProcedureOccurrenceShortTerm": true, - "DeviceExposureLongTerm": true, - "DeviceExposureShortTerm": true, - "MeasurementLongTerm": true, - "MeasurementShortTerm": true, - "MeasurementRangeGroupLongTerm": true, - "ObservationLongTerm": true, - "ObservationShortTerm": true, - "CharlsonIndex": true, - "Dcsi": true, - "Chads2": true, - "Chads2Vasc": true, - "includedCovariateConceptIds": [], - "includedCovariateIds": [], - "addDescendantsToInclude": false, - "excludedCovariateConceptIds": [], - "addDescendantsToExclude": false, - "shortTermStartDays": -30, - "mediumTermStartDays": -180, - "endDays": 0, - "longTermStartDays": -365, - "attr_class": "covariateSettings", - "attr_fun": "getDbDefaultCovariateData" - }, - "populationSettings": { - "binary": true, - "includeAllOutcomes": true, - "firstExposureOnly": false, - "washoutPeriod": 0, - "removeSubjectsWithPriorOutcome": true, - "priorOutcomeLookback": 99999, - "requireTimeAtRisk": true, - "minTimeAtRisk": 1, - "riskWindowStart": 1, - "startAnchor": "cohort start", - "riskWindowEnd": 365, - "endAnchor": "cohort start", - "restrictTarToCohortEnd": false, - "attr_class": "populationSettings" - }, - "sampleSettings": [ - { - "numberOutcomestoNonOutcomes": 1, - "sampleSeed": 1, - "attr_class": "sampleSettings", - "attr_fun": "sameData" - } - ], - "featureEngineeringSettings": [ - { - "attr_class": "featureEngineeringSettings", - "attr_fun": "sameData" - } - ], - "preprocessSettings": { - "minFraction": 0.001, - 
"normalize": true, - "removeRedundancy": true, - "attr_class": "preprocessSettings" - }, - "modelSettings": { - "fitFunction": "fitCyclopsModel", - "param": { - "priorParams": { - "priorType": "laplace", - "forceIntercept": false, - "variance": 0.01, - "exclude": 0 - }, - "includeCovariateIds": null, - "upperLimit": 20, - "lowerLimit": 0.01, - "priorCoefs": null, - "attr_settings": { - "priorfunction": "Cyclops::createPrior", - "selectorType": "byPid", - "crossValidationInPrior": true, - "modelType": "logistic", - "addIntercept": true, - "useControl": true, - "seed": 15721621, - "name": "Lasso Logistic Regression", - "threads": -1, - "tolerance": 2e-06, - "cvRepetitions": 1, - "maxIterations": 3000 + "settings": { + "modelDesignList": [ + { + "targetId": 1, + "outcomeId": 3, + "restrictPlpDataSettings": { + "studyStartDate": "", + "studyEndDate": "", + "firstExposureOnly": false, + "washoutPeriod": 0, + "sampleSize": null, + "attr_class": "restrictPlpDataSettings" + }, + "covariateSettings": { + "temporal": false, + "temporalSequence": false, + "DemographicsGender": true, + "DemographicsAgeGroup": true, + "DemographicsRace": true, + "DemographicsEthnicity": true, + "DemographicsIndexYear": true, + "DemographicsIndexMonth": true, + "ConditionGroupEraLongTerm": true, + "ConditionGroupEraShortTerm": true, + "DrugGroupEraLongTerm": true, + "DrugGroupEraShortTerm": true, + "DrugGroupEraOverlapping": true, + "ProcedureOccurrenceLongTerm": true, + "ProcedureOccurrenceShortTerm": true, + "DeviceExposureLongTerm": true, + "DeviceExposureShortTerm": true, + "MeasurementLongTerm": true, + "MeasurementShortTerm": true, + "MeasurementRangeGroupLongTerm": true, + "ObservationLongTerm": true, + "ObservationShortTerm": true, + "CharlsonIndex": true, + "Dcsi": true, + "Chads2": true, + "Chads2Vasc": true, + "includedCovariateConceptIds": [], + "includedCovariateIds": [], + "addDescendantsToInclude": false, + "excludedCovariateConceptIds": [], + "addDescendantsToExclude": false, + 
"shortTermStartDays": -30, + "mediumTermStartDays": -180, + "endDays": 0, + "longTermStartDays": -365, + "attr_class": "covariateSettings", + "attr_fun": "getDbDefaultCovariateData" + }, + "populationSettings": { + "binary": true, + "includeAllOutcomes": true, + "firstExposureOnly": false, + "washoutPeriod": 0, + "removeSubjectsWithPriorOutcome": true, + "priorOutcomeLookback": 99999, + "requireTimeAtRisk": true, + "minTimeAtRisk": 1, + "riskWindowStart": 1, + "startAnchor": "cohort start", + "riskWindowEnd": 365, + "endAnchor": "cohort start", + "restrictTarToCohortEnd": false, + "attr_class": "populationSettings" + }, + "sampleSettings": [ + { + "numberOutcomestoNonOutcomes": 1, + "sampleSeed": 1, + "attr_class": "sampleSettings", + "attr_fun": "sameData" + } + ], + "featureEngineeringSettings": [ + { + "attr_class": "featureEngineeringSettings", + "attr_fun": "sameData" + } + ], + "preprocessSettings": { + "minFraction": 0.001, + "normalize": true, + "removeRedundancy": true, + "attr_class": "preprocessSettings" + }, + "modelSettings": { + "fitFunction": "fitCyclopsModel", + "param": { + "priorParams": { + "priorType": "laplace", + "forceIntercept": false, + "variance": 0.01, + "exclude": 0 + }, + "includeCovariateIds": null, + "upperLimit": 20, + "lowerLimit": 0.01, + "priorCoefs": null, + "attr_settings": { + "priorfunction": "Cyclops::createPrior", + "selectorType": "byPid", + "crossValidationInPrior": true, + "modelType": "logistic", + "addIntercept": true, + "useControl": true, + "seed": 99660466, + "name": "Lasso Logistic Regression", + "threads": -1, + "tolerance": 2e-06, + "cvRepetitions": 1, + "maxIterations": 3000 + }, + "attr_modelType": "binary", + "attr_saveType": "RtoJson" }, - "attr_modelType": "binary", - "attr_saveType": "RtoJson" + "attr_class": "modelSettings" }, - "attr_class": "modelSettings" - }, - "splitSettings": { - "test": 0.25, - "train": 0.75, - "seed": 78595, - "nfold": 3, - "attr_class": "splitSettings", - "attr_fun": 
"randomSplitter" - }, - "executeSettings": { - "runSplitData": true, - "runSampleData": false, - "runfeatureEngineering": false, - "runPreprocessData": true, - "runModelDevelopment": true, - "runCovariateSummary": true, - "attr_class": "executeSettings" - }, - "attr_class": "modelDesign" - }, - { - "targetId": 2, - "outcomeId": 3, - "restrictPlpDataSettings": { - "studyStartDate": "", - "studyEndDate": "", - "firstExposureOnly": false, - "washoutPeriod": 0, - "sampleSize": null, - "attr_class": "restrictPlpDataSettings" - }, - "covariateSettings": { - "temporal": false, - "temporalSequence": false, - "DemographicsGender": true, - "DemographicsAgeGroup": true, - "DemographicsRace": true, - "DemographicsEthnicity": true, - "DemographicsIndexYear": true, - "DemographicsIndexMonth": true, - "ConditionGroupEraLongTerm": true, - "ConditionGroupEraShortTerm": true, - "DrugGroupEraLongTerm": true, - "DrugGroupEraShortTerm": true, - "DrugGroupEraOverlapping": true, - "ProcedureOccurrenceLongTerm": true, - "ProcedureOccurrenceShortTerm": true, - "DeviceExposureLongTerm": true, - "DeviceExposureShortTerm": true, - "MeasurementLongTerm": true, - "MeasurementShortTerm": true, - "MeasurementRangeGroupLongTerm": true, - "ObservationLongTerm": true, - "ObservationShortTerm": true, - "CharlsonIndex": true, - "Dcsi": true, - "Chads2": true, - "Chads2Vasc": true, - "includedCovariateConceptIds": [], - "includedCovariateIds": [], - "addDescendantsToInclude": false, - "excludedCovariateConceptIds": [], - "addDescendantsToExclude": false, - "shortTermStartDays": -30, - "mediumTermStartDays": -180, - "endDays": 0, - "longTermStartDays": -365, - "attr_class": "covariateSettings", - "attr_fun": "getDbDefaultCovariateData" - }, - "populationSettings": { - "binary": true, - "includeAllOutcomes": true, - "firstExposureOnly": false, - "washoutPeriod": 0, - "removeSubjectsWithPriorOutcome": true, - "priorOutcomeLookback": 99999, - "requireTimeAtRisk": true, - "minTimeAtRisk": 1, - 
"riskWindowStart": 1, - "startAnchor": "cohort start", - "riskWindowEnd": 365, - "endAnchor": "cohort start", - "restrictTarToCohortEnd": false, - "attr_class": "populationSettings" - }, - "sampleSettings": [ - { - "numberOutcomestoNonOutcomes": 1, - "sampleSeed": 1, - "attr_class": "sampleSettings", - "attr_fun": "sameData" - } - ], - "featureEngineeringSettings": [ - { - "attr_class": "featureEngineeringSettings", - "attr_fun": "sameData" - } - ], - "preprocessSettings": { - "minFraction": 0.001, - "normalize": true, - "removeRedundancy": true, - "attr_class": "preprocessSettings" - }, - "modelSettings": { - "fitFunction": "fitCyclopsModel", - "param": { - "priorParams": { - "priorType": "laplace", - "forceIntercept": false, - "variance": 0.01, - "exclude": 0 - }, - "includeCovariateIds": null, - "upperLimit": 20, - "lowerLimit": 0.01, - "priorCoefs": null, - "attr_settings": { - "priorfunction": "Cyclops::createPrior", - "selectorType": "byPid", - "crossValidationInPrior": true, - "modelType": "logistic", - "addIntercept": true, - "useControl": true, - "seed": 85843933, - "name": "Lasso Logistic Regression", - "threads": -1, - "tolerance": 2e-06, - "cvRepetitions": 1, - "maxIterations": 3000 + "splitSettings": { + "test": 0.25, + "train": 0.75, + "seed": 98421, + "nfold": 3, + "attr_class": "splitSettings", + "attr_fun": "randomSplitter" + }, + "executeSettings": { + "runSplitData": true, + "runSampleData": false, + "runfeatureEngineering": false, + "runPreprocessData": true, + "runModelDevelopment": true, + "runCovariateSummary": true, + "attr_class": "executeSettings" + }, + "attr_class": "modelDesign" + }, + { + "targetId": 2, + "outcomeId": 3, + "restrictPlpDataSettings": { + "studyStartDate": "", + "studyEndDate": "", + "firstExposureOnly": false, + "washoutPeriod": 0, + "sampleSize": null, + "attr_class": "restrictPlpDataSettings" + }, + "covariateSettings": { + "temporal": false, + "temporalSequence": false, + "DemographicsGender": true, + 
"DemographicsAgeGroup": true, + "DemographicsRace": true, + "DemographicsEthnicity": true, + "DemographicsIndexYear": true, + "DemographicsIndexMonth": true, + "ConditionGroupEraLongTerm": true, + "ConditionGroupEraShortTerm": true, + "DrugGroupEraLongTerm": true, + "DrugGroupEraShortTerm": true, + "DrugGroupEraOverlapping": true, + "ProcedureOccurrenceLongTerm": true, + "ProcedureOccurrenceShortTerm": true, + "DeviceExposureLongTerm": true, + "DeviceExposureShortTerm": true, + "MeasurementLongTerm": true, + "MeasurementShortTerm": true, + "MeasurementRangeGroupLongTerm": true, + "ObservationLongTerm": true, + "ObservationShortTerm": true, + "CharlsonIndex": true, + "Dcsi": true, + "Chads2": true, + "Chads2Vasc": true, + "includedCovariateConceptIds": [], + "includedCovariateIds": [], + "addDescendantsToInclude": false, + "excludedCovariateConceptIds": [], + "addDescendantsToExclude": false, + "shortTermStartDays": -30, + "mediumTermStartDays": -180, + "endDays": 0, + "longTermStartDays": -365, + "attr_class": "covariateSettings", + "attr_fun": "getDbDefaultCovariateData" + }, + "populationSettings": { + "binary": true, + "includeAllOutcomes": true, + "firstExposureOnly": false, + "washoutPeriod": 0, + "removeSubjectsWithPriorOutcome": true, + "priorOutcomeLookback": 99999, + "requireTimeAtRisk": true, + "minTimeAtRisk": 1, + "riskWindowStart": 1, + "startAnchor": "cohort start", + "riskWindowEnd": 365, + "endAnchor": "cohort start", + "restrictTarToCohortEnd": false, + "attr_class": "populationSettings" + }, + "sampleSettings": [ + { + "numberOutcomestoNonOutcomes": 1, + "sampleSeed": 1, + "attr_class": "sampleSettings", + "attr_fun": "sameData" + } + ], + "featureEngineeringSettings": [ + { + "attr_class": "featureEngineeringSettings", + "attr_fun": "sameData" + } + ], + "preprocessSettings": { + "minFraction": 0.001, + "normalize": true, + "removeRedundancy": true, + "attr_class": "preprocessSettings" + }, + "modelSettings": { + "fitFunction": "fitCyclopsModel", 
+ "param": { + "priorParams": { + "priorType": "laplace", + "forceIntercept": false, + "variance": 0.01, + "exclude": 0 + }, + "includeCovariateIds": null, + "upperLimit": 20, + "lowerLimit": 0.01, + "priorCoefs": null, + "attr_settings": { + "priorfunction": "Cyclops::createPrior", + "selectorType": "byPid", + "crossValidationInPrior": true, + "modelType": "logistic", + "addIntercept": true, + "useControl": true, + "seed": 61501935, + "name": "Lasso Logistic Regression", + "threads": -1, + "tolerance": 2e-06, + "cvRepetitions": 1, + "maxIterations": 3000 + }, + "attr_modelType": "binary", + "attr_saveType": "RtoJson" }, - "attr_modelType": "binary", - "attr_saveType": "RtoJson" + "attr_class": "modelSettings" }, - "attr_class": "modelSettings" - }, - "splitSettings": { - "test": 0.25, - "train": 0.75, - "seed": 729, - "nfold": 3, - "attr_class": "splitSettings", - "attr_fun": "randomSplitter" - }, - "executeSettings": { - "runSplitData": true, - "runSampleData": false, - "runfeatureEngineering": false, - "runPreprocessData": true, - "runModelDevelopment": true, - "runCovariateSummary": true, - "attr_class": "executeSettings" - }, - "attr_class": "modelDesign" - } - ], - "attr_class": ["PatientLevelPredictionModuleSpecifications", "ModuleSpecifications"] + "splitSettings": { + "test": 0.25, + "train": 0.75, + "seed": 56231, + "nfold": 3, + "attr_class": "splitSettings", + "attr_fun": "randomSplitter" + }, + "executeSettings": { + "runSplitData": true, + "runSampleData": false, + "runfeatureEngineering": false, + "runPreprocessData": true, + "runModelDevelopment": true, + "runCovariateSummary": true, + "attr_class": "executeSettings" + }, + "attr_class": "modelDesign" + } + ] + }, + "attr_class": ["ModuleSpecifications", "PatientLevelPredictionModuleSpecifications"] } ], "attr_class": "AnalysisSpecifications" diff --git a/inst/testdata/renv.lock b/inst/testdata/renv.lock deleted file mode 100644 index 5b9b102d..00000000 --- a/inst/testdata/renv.lock +++ /dev/null @@ 
-1,887 +0,0 @@ -{ - "R": { - "Version": "4.2.3", - "Repositories": [ - { - "Name": "CRAN", - "URL": "https://packagemanager.posit.co/cran/latest" - } - ] - }, - "Packages": { - "CohortGenerator": { - "Package": "CohortGenerator", - "Version": "0.8.1", - "Source": "GitHub", - "RemoteType": "github", - "RemoteHost": "api.github.com", - "RemoteUsername": "OHDSI", - "RemoteRepo": "CohortGenerator", - "RemoteRef": "v0.8.1", - "RemoteSha": "78757f1b191a395cf9dcff0d5bbe2b9fa4aa163e" - }, - "DBI": { - "Package": "DBI", - "Version": "1.1.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "methods" - ], - "Hash": "b2866e62bab9378c3cc9476a1954226b" - }, - "DatabaseConnector": { - "Package": "DatabaseConnector", - "Version": "6.3.2", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "DBI", - "R", - "SqlRender", - "bit64", - "checkmate", - "dbplyr", - "digest", - "methods", - "rJava", - "readr", - "rlang", - "stringr", - "urltools", - "utils" - ], - "Hash": "1ef65614602c6534a6c666e872c3b647" - }, - "ParallelLogger": { - "Package": "ParallelLogger", - "Version": "3.3.0", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "jsonlite", - "methods", - "snow", - "utils", - "xml2" - ], - "Hash": "8d893bed8c8bfe21217464dd3f9ec3e9" - }, - "R6": { - "Package": "R6", - "Version": "2.5.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R" - ], - "Hash": "470851b6d5d0ac559e9d01bb352b4021" - }, - "RJSONIO": { - "Package": "RJSONIO", - "Version": "1.3-1.8", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "methods" - ], - "Hash": "cd79d1874fb20217463451f8c310c526" - }, - "RSQLite": { - "Package": "RSQLite", - "Version": "2.3.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "DBI", - "R", - "bit64", - "blob", - "cpp11", - "memoise", - "methods", - "pkgconfig", - "plogr" - ], - "Hash": "207c90cd5438a1f596da2cd54c606fee" - }, - "Rcpp": { - 
"Package": "Rcpp", - "Version": "1.0.11", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "methods", - "utils" - ], - "Hash": "e749cae40fa9ef469b6050959517453c" - }, - "ResultModelManager": { - "Package": "ResultModelManager", - "Version": "0.5.6", - "Source": "GitHub", - "RemoteType": "github", - "RemoteHost": "api.github.com", - "RemoteUsername": "OHDSI", - "RemoteRepo": "ResultModelManager", - "RemoteRef": "v0.5.6", - "RemoteSha": "3033804e5af77b8b8dacda67c4d6853731e3641b" - }, - "SqlRender": { - "Package": "SqlRender", - "Version": "1.16.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "checkmate", - "rJava", - "rlang" - ], - "Hash": "94d9cae91bbd8aed211bea82aff7cf77" - }, - "askpass": { - "Package": "askpass", - "Version": "1.2.0", - "Source": "Repository", - "Repository": "CRAN" - }, - "assertthat": { - "Package": "assertthat", - "Version": "0.2.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "tools" - ], - "Hash": "50c838a310445e954bc13f26f26a6ecf" - }, - "backports": { - "Package": "backports", - "Version": "1.4.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R" - ], - "Hash": "c39fbec8a30d23e721980b8afb31984c" - }, - "bit": { - "Package": "bit", - "Version": "4.0.5", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R" - ], - "Hash": "d242abec29412ce988848d0294b208fd" - }, - "bit64": { - "Package": "bit64", - "Version": "4.0.5", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "bit", - "methods", - "stats", - "utils" - ], - "Hash": "9fe98599ca456d6552421db0d6772d8f" - }, - "blob": { - "Package": "blob", - "Version": "1.2.4", - "Source": "Repository", - "Repository": "RSPM", - "Requirements": [ - "methods", - "rlang", - "vctrs" - ], - "Hash": "40415719b5a479b87949f3aa0aee737c" - }, - "cachem": { - "Package": "cachem", - "Version": "1.0.8", - "Source": "Repository", - "Repository": "CRAN", - 
"Requirements": [ - "fastmap", - "rlang" - ], - "Hash": "cda74447c42f529de601fe4d4050daef" - }, - "checkmate": { - "Package": "checkmate", - "Version": "2.3.0", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "backports", - "utils" - ], - "Hash": "ed4275b13c6ab74b89a31def0b6bf835" - }, - "cli": { - "Package": "cli", - "Version": "3.6.1", - "Source": "Repository", - "Repository": "RSPM", - "Requirements": [ - "R", - "utils" - ], - "Hash": "89e6d8219950eac806ae0c489052048a" - }, - "clipr": { - "Package": "clipr", - "Version": "0.8.0", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "utils" - ], - "Hash": "3f038e5ac7f41d4ac41ce658c85e3042" - }, - "cpp11": { - "Package": "cpp11", - "Version": "0.4.6", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "ed588261931ee3be2c700d22e94a29ab" - }, - "crayon": { - "Package": "crayon", - "Version": "1.5.2", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "grDevices", - "methods", - "utils" - ], - "Hash": "e8a1e41acf02548751f45c718d55aa6a" - }, - "dbplyr": { - "Package": "dbplyr", - "Version": "2.3.4", - "Source": "Repository", - "Repository": "RSPM", - "Requirements": [ - "DBI", - "R", - "R6", - "blob", - "cli", - "dplyr", - "glue", - "lifecycle", - "magrittr", - "methods", - "pillar", - "purrr", - "rlang", - "tibble", - "tidyr", - "tidyselect", - "utils", - "vctrs", - "withr" - ], - "Hash": "d24305b92db333726aed162a2c23a147" - }, - "digest": { - "Package": "digest", - "Version": "0.6.33", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "utils" - ], - "Hash": "8b708f296afd9ae69f450f9640be8990" - }, - "dplyr": { - "Package": "dplyr", - "Version": "1.1.3", - "Source": "Repository", - "Repository": "RSPM", - "Requirements": [ - "R", - "R6", - "cli", - "generics", - "glue", - "lifecycle", - "magrittr", - "methods", - "pillar", - "rlang", - "tibble", - "tidyselect", - "utils", - "vctrs" - ], - "Hash": 
"eb5742d256a0d9306d85ea68756d8187" - }, - "fansi": { - "Package": "fansi", - "Version": "1.0.4", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "grDevices", - "utils" - ], - "Hash": "1d9e7ad3c8312a192dea7d3db0274fde" - }, - "fastmap": { - "Package": "fastmap", - "Version": "1.1.1", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "f7736a18de97dea803bde0a2daaafb27" - }, - "filelock": { - "Package": "filelock", - "Version": "1.0.2", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "38ec653c2613bed60052ba3787bd8a2c" - }, - "generics": { - "Package": "generics", - "Version": "0.1.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "methods" - ], - "Hash": "15e9634c0fcd294799e9b2e929ed1b86" - }, - "glue": { - "Package": "glue", - "Version": "1.6.2", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "methods" - ], - "Hash": "4f2596dfb05dac67b9dc558e5c6fba2e" - }, - "hms": { - "Package": "hms", - "Version": "1.1.3", - "Source": "Repository", - "Repository": "RSPM", - "Requirements": [ - "lifecycle", - "methods", - "pkgconfig", - "rlang", - "vctrs" - ], - "Hash": "b59377caa7ed00fa41808342002138f9" - }, - "jsonlite": { - "Package": "jsonlite", - "Version": "1.8.7", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "methods" - ], - "Hash": "a4269a09a9b865579b2635c77e572374" - }, - "keyring": { - "Package": "keyring", - "Version": "1.3.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R6", - "askpass", - "assertthat", - "filelock", - "openssl", - "rappdirs", - "sodium", - "tools", - "utils", - "yaml" - ], - "Hash": "b7880ebefe188d62b099673bbc04afac" - }, - "later": { - "Package": "later", - "Version": "1.3.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "Rcpp", - "rlang" - ], - "Hash": "7e7b457d7766bc47f2a5f21cc2984f8e" - }, - "lifecycle": { - "Package": "lifecycle", - "Version": "1.0.3", 
- "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "cli", - "glue", - "rlang" - ], - "Hash": "001cecbeac1cff9301bdc3775ee46a86" - }, - "lubridate": { - "Package": "lubridate", - "Version": "1.9.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "generics", - "methods", - "timechange" - ], - "Hash": "e25f18436e3efd42c7c590a1c4c15390" - }, - "magrittr": { - "Package": "magrittr", - "Version": "2.0.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R" - ], - "Hash": "7ce2733a9826b3aeb1775d56fd305472" - }, - "memoise": { - "Package": "memoise", - "Version": "2.0.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "cachem", - "rlang" - ], - "Hash": "e2817ccf4a065c5d9d7f2cfbe7c1d78c" - }, - "openssl": { - "Package": "openssl", - "Version": "2.1.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "askpass" - ], - "Hash": "0f7cd2962e3044bb940cca4f4b5cecbe" - }, - "pillar": { - "Package": "pillar", - "Version": "1.9.0", - "Source": "Repository", - "Repository": "RSPM", - "Requirements": [ - "cli", - "fansi", - "glue", - "lifecycle", - "rlang", - "utf8", - "utils", - "vctrs" - ], - "Hash": "15da5a8412f317beeee6175fbc76f4bb" - }, - "pkgconfig": { - "Package": "pkgconfig", - "Version": "2.0.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "utils" - ], - "Hash": "01f28d4278f15c76cddbea05899c5d6f" - }, - "plogr": { - "Package": "plogr", - "Version": "0.2.0", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "09eb987710984fc2905c7129c7d85e65" - }, - "pool": { - "Package": "pool", - "Version": "1.0.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "DBI", - "R", - "R6", - "later", - "methods", - "rlang", - "withr" - ], - "Hash": "52d086ff1a2ccccbae6d462cb0773835" - }, - "prettyunits": { - "Package": "prettyunits", - "Version": "1.2.0", - "Source": "Repository", - "Repository": "CRAN", - 
"Hash": "95ef9167b75dde9d2ccc3c7528393e7e" - }, - "progress": { - "Package": "progress", - "Version": "1.2.2", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R6", - "crayon", - "hms", - "prettyunits" - ], - "Hash": "14dc9f7a3c91ebb14ec5bb9208a07061" - }, - "purrr": { - "Package": "purrr", - "Version": "1.0.2", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "cli", - "lifecycle", - "magrittr", - "rlang", - "vctrs" - ], - "Hash": "d71c815267c640f17ddbf7f16144b4bb" - }, - "rJava": { - "Package": "rJava", - "Version": "1.0-6", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "methods" - ], - "Hash": "0415819f6baa75d86d52483f7292b623" - }, - "rappdirs": { - "Package": "rappdirs", - "Version": "0.3.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R" - ], - "Hash": "5e3c5dc0b071b21fa128676560dbe94d" - }, - "readr": { - "Package": "readr", - "Version": "2.1.4", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "R6", - "cli", - "clipr", - "cpp11", - "crayon", - "hms", - "lifecycle", - "methods", - "rlang", - "tibble", - "tzdb", - "utils", - "vroom" - ], - "Hash": "b5047343b3825f37ad9d3b5d89aa1078" - }, - "renv": { - "Package": "renv", - "Version": "1.0.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "utils" - ], - "Hash": "41b847654f567341725473431dd0d5ab" - }, - "rlang": { - "Package": "rlang", - "Version": "1.1.1", - "Source": "Repository", - "Repository": "RSPM", - "Requirements": [ - "R", - "utils" - ], - "Hash": "dc079ccd156cde8647360f473c1fa718" - }, - "snow": { - "Package": "snow", - "Version": "0.4-4", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "utils" - ], - "Hash": "40b74690debd20c57d93d8c246b305d4" - }, - "sodium": { - "Package": "sodium", - "Version": "1.3.0", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "3606bb09e0914edd4fc8313b500dcd5e" - }, 
- "stringi": { - "Package": "stringi", - "Version": "1.7.12", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "stats", - "tools", - "utils" - ], - "Hash": "ca8bd84263c77310739d2cf64d84d7c9" - }, - "stringr": { - "Package": "stringr", - "Version": "1.5.0", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "cli", - "glue", - "lifecycle", - "magrittr", - "rlang", - "stringi", - "vctrs" - ], - "Hash": "671a4d384ae9d32fc47a14e98bfa3dc8" - }, - "sys": { - "Package": "sys", - "Version": "3.4.2", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "34c16f1ef796057bfa06d3f4ff818a5d" - }, - "tibble": { - "Package": "tibble", - "Version": "3.2.1", - "Source": "Repository", - "Repository": "RSPM", - "Requirements": [ - "R", - "fansi", - "lifecycle", - "magrittr", - "methods", - "pillar", - "pkgconfig", - "rlang", - "utils", - "vctrs" - ], - "Hash": "a84e2cc86d07289b3b6f5069df7a004c" - }, - "tidyr": { - "Package": "tidyr", - "Version": "1.3.0", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "cli", - "cpp11", - "dplyr", - "glue", - "lifecycle", - "magrittr", - "purrr", - "rlang", - "stringr", - "tibble", - "tidyselect", - "utils", - "vctrs" - ], - "Hash": "e47debdc7ce599b070c8e78e8ac0cfcf" - }, - "tidyselect": { - "Package": "tidyselect", - "Version": "1.2.0", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "cli", - "glue", - "lifecycle", - "rlang", - "vctrs", - "withr" - ], - "Hash": "79540e5fcd9e0435af547d885f184fd5" - }, - "timechange": { - "Package": "timechange", - "Version": "0.2.0", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "cpp11" - ], - "Hash": "8548b44f79a35ba1791308b61e6012d7" - }, - "triebeard": { - "Package": "triebeard", - "Version": "0.4.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "Rcpp" - ], - "Hash": "642507a148b0dd9b5620177e0a044413" - }, - "tzdb": { - "Package": 
"tzdb", - "Version": "0.4.0", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "cpp11" - ], - "Hash": "b2e1cbce7c903eaf23ec05c58e59fb5e" - }, - "urltools": { - "Package": "urltools", - "Version": "1.7.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "Rcpp", - "methods", - "triebeard" - ], - "Hash": "e86a704261a105f4703f653e05defa3e" - }, - "utf8": { - "Package": "utf8", - "Version": "1.2.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R" - ], - "Hash": "1fe17157424bb09c48a8b3b550c753bc" - }, - "vctrs": { - "Package": "vctrs", - "Version": "0.6.3", - "Source": "Repository", - "Repository": "RSPM", - "Requirements": [ - "R", - "cli", - "glue", - "lifecycle", - "rlang" - ], - "Hash": "06eceb3a5d716fd0654cc23ca3d71a99" - }, - "vroom": { - "Package": "vroom", - "Version": "1.6.3", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "bit64", - "cli", - "cpp11", - "crayon", - "glue", - "hms", - "lifecycle", - "methods", - "progress", - "rlang", - "stats", - "tibble", - "tidyselect", - "tzdb", - "vctrs", - "withr" - ], - "Hash": "7015a74373b83ffaef64023f4a0f5033" - }, - "withr": { - "Package": "withr", - "Version": "2.5.1", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "grDevices", - "graphics", - "stats" - ], - "Hash": "c0e49a9760983e81e55cdd9be92e7182" - }, - "xml2": { - "Package": "xml2", - "Version": "1.3.5", - "Source": "Repository", - "Repository": "CRAN", - "Requirements": [ - "R", - "methods" - ], - "Hash": "40682ed6a969ea5abfd351eb67833adc" - }, - "yaml": { - "Package": "yaml", - "Version": "2.3.7", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "0d0056cc5383fbc240ccd0cb584bf436" - }, - "zip": { - "Package": "zip", - "Version": "2.3.0", - "Source": "Repository", - "Repository": "CRAN", - "Hash": "d98c94dacb7e0efcf83b0a133a705504" - } - } -} diff --git 
a/inst/testdata/resultsModulesAnalysisSpecifications.json b/inst/testdata/resultsModulesAnalysisSpecifications.json new file mode 100644 index 00000000..345e036c --- /dev/null +++ b/inst/testdata/resultsModulesAnalysisSpecifications.json @@ -0,0 +1,59 @@ +{ + "sharedResources": [], + "moduleSpecifications": [ + { + "module": "EvidenceSynthesisModule", + "settings": { + "evidenceSynthesisAnalysisList": [ + { + "chainLength": 1100000, + "burnIn": 100000, + "subSampleFrequency": 100, + "priorSd": [2, 0.5], + "alpha": 0.05, + "robust": false, + "df": 4, + "seed": 1, + "evidenceSynthesisAnalysisId": 1, + "evidenceSynthesisDescription": "Bayesian random-effects alpha 0.05 - adaptive grid", + "evidenceSynthesisSource": { + "sourceMethod": "CohortMethod", + "likelihoodApproximation": "adaptive grid", + "attr_class": "EvidenceSynthesisSource" + }, + "controlType": "outcome", + "attr_class": ["BayesianMetaAnalysis", "EvidenceSynthesisAnalysis"] + }, + { + "chainLength": 1100000, + "burnIn": 100000, + "subSampleFrequency": 100, + "priorSd": [2, 0.5], + "alpha": 0.05, + "robust": false, + "df": 4, + "seed": 1, + "evidenceSynthesisAnalysisId": 2, + "evidenceSynthesisDescription": "Bayesian random-effects alpha 0.05 - adaptive grid", + "evidenceSynthesisSource": { + "sourceMethod": "SelfControlledCaseSeries", + "likelihoodApproximation": "adaptive grid", + "attr_class": "EvidenceSynthesisSource" + }, + "controlType": "outcome", + "attr_class": ["BayesianMetaAnalysis", "EvidenceSynthesisAnalysis"] + } + ], + "esDiagnosticThresholds": { + "mdrrThreshold": 10, + "easeThreshold": 0.25, + "i2Threshold": 0.4, + "tauThreshold": 0.6931, + "attr_class": "EsDiagnosticThresholds" + } + }, + "attr_class": ["ModuleSpecifications", "EvidenceSynthesisModuleSpecifications"] + } + ], + "attr_class": "AnalysisSpecifications" +} diff --git a/inst/testdata/unitTestAnalysisSpecification.json b/inst/testdata/unitTestAnalysisSpecification.json deleted file mode 100644 index e6b5af7e..00000000 --- 
a/inst/testdata/unitTestAnalysisSpecification.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "sharedResources": [], - "moduleSpecifications": [ - { - "module": "TestModule1", - "version": "0.0.1", - "settings": [], - "attr_class": ["TestModule1Specifications", "ModuleSpecifications"] - } - ], - "attr_class": "AnalysisSpecifications" -} diff --git a/man-roxygen/connectionDetails.R b/man-roxygen/connectionDetails.R new file mode 100644 index 00000000..07304da8 --- /dev/null +++ b/man-roxygen/connectionDetails.R @@ -0,0 +1,2 @@ +#' @param connectionDetails An object of class `connectionDetails` as created by the +#' [DatabaseConnector::createConnectionDetails()] function. diff --git a/man-roxygen/enforceModuleDependencies.R b/man-roxygen/enforceModuleDependencies.R deleted file mode 100644 index 92393bc8..00000000 --- a/man-roxygen/enforceModuleDependencies.R +++ /dev/null @@ -1,8 +0,0 @@ -#' @param enforceModuleDependencies When set to TRUE, Strategus will enforce -#' module dependencies that are declared by each module. For example, the -#' CohortDiagnostics module declares a dependency on the CohortGenerator module -#' and Strategus will require that an analysis specification declare that both -#' modules must exist in order to execute the analysis. When set to FALSE, -#' Strategus will not enforce these module dependencies which assumes you have -#' properly run all module dependencies yourself. Setting this to FALSE is not -#' recommended since it is potentially unsafe. diff --git a/man-roxygen/forceVerification.R b/man-roxygen/forceVerification.R deleted file mode 100644 index 540cf869..00000000 --- a/man-roxygen/forceVerification.R +++ /dev/null @@ -1,5 +0,0 @@ -#' @param forceVerification When set to TRUE, the verification process is forced -#' to re-evaluate if a module is properly installed. 
The default is FALSE -#' since if a module is successfully validated, the module will contain -#' the hash value of the module's renv.lock file in the file system so it can -#' by-pass running this check every time. diff --git a/man-roxygen/keyringName.R b/man-roxygen/keyringName.R deleted file mode 100644 index 13737125..00000000 --- a/man-roxygen/keyringName.R +++ /dev/null @@ -1,7 +0,0 @@ -#' @param keyringName The name of the keyring to operate on. This function assumes you have -#' created the keyring before calling this function. It defaults to -#' NULL to select the default keyring. If the keyring is password -#' protected, the password must be stored in the environment variable -#' STRATEGUS_KEYRING_PASSWORD so it is retrieved using the command -#' Sys.getenv("STRATEGUS_KEYRING_PASSWORD") - diff --git a/man-roxygen/moduleSpecifications.R b/man-roxygen/moduleSpecifications.R new file mode 100644 index 00000000..a793e24f --- /dev/null +++ b/man-roxygen/moduleSpecifications.R @@ -0,0 +1 @@ +#' @param moduleSpecifications An object of type `ModuleSpecifications` diff --git a/man-roxygen/resultsConnectionDetails.R b/man-roxygen/resultsConnectionDetails.R new file mode 100644 index 00000000..80a0bd23 --- /dev/null +++ b/man-roxygen/resultsConnectionDetails.R @@ -0,0 +1,3 @@ +#' @param resultsConnectionDetails The connection details to the results database which +#' is an object of class `connectionDetails` as created by the +#' [DatabaseConnector::createConnectionDetails()] function. 
diff --git a/man-roxygen/resultsDataModelSettings.R b/man-roxygen/resultsDataModelSettings.R new file mode 100644 index 00000000..3c13d0c0 --- /dev/null +++ b/man-roxygen/resultsDataModelSettings.R @@ -0,0 +1 @@ +#' @param resultsDataModelSettings The results data model settings as created using [@seealso [createResultsDataModelSettings()]] diff --git a/man-roxygen/resultsDatabaseSchema.R b/man-roxygen/resultsDatabaseSchema.R new file mode 100644 index 00000000..82177a74 --- /dev/null +++ b/man-roxygen/resultsDatabaseSchema.R @@ -0,0 +1 @@ +#' @param resultsDatabaseSchema The schema in the results database that holds the results data model. diff --git a/man-roxygen/resultsFolder.R b/man-roxygen/resultsFolder.R new file mode 100644 index 00000000..ff7d1fbc --- /dev/null +++ b/man-roxygen/resultsFolder.R @@ -0,0 +1 @@ +#' @param resultsFolder The root folder holding the study results. diff --git a/man-roxygen/tablePrefix.R b/man-roxygen/tablePrefix.R new file mode 100644 index 00000000..b20d050e --- /dev/null +++ b/man-roxygen/tablePrefix.R @@ -0,0 +1 @@ +#' @param tablePrefix A prefix to apply to the database table names (optional). diff --git a/man/CharacterizationModule.Rd b/man/CharacterizationModule.Rd new file mode 100644 index 00000000..691acd20 --- /dev/null +++ b/man/CharacterizationModule.Rd @@ -0,0 +1,208 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/Module-Characterization.R +\name{CharacterizationModule} +\alias{CharacterizationModule} +\title{Module for generating cohort characterization information} +\description{ +Computes cohort characterization information against the OMOP CDM +NOTE: Using v1.0.3 version of module and +commit 372fb70c6133bdd8811f8dc1d2a2f9cb9a184345 for the +package +} +\section{Super class}{ +\code{\link[Strategus:StrategusModule]{Strategus::StrategusModule}} -> \code{CharacterizationModule} +} +\section{Public fields}{ +\if{html}{\out{
}} +\describe{ +\item{\code{tablePrefix}}{The table prefix to append to the results tables} +} +\if{html}{\out{
}} +} +\section{Methods}{ +\subsection{Public methods}{ +\itemize{ +\item \href{#method-CharacterizationModule-new}{\code{CharacterizationModule$new()}} +\item \href{#method-CharacterizationModule-execute}{\code{CharacterizationModule$execute()}} +\item \href{#method-CharacterizationModule-createResultsDataModel}{\code{CharacterizationModule$createResultsDataModel()}} +\item \href{#method-CharacterizationModule-uploadResults}{\code{CharacterizationModule$uploadResults()}} +\item \href{#method-CharacterizationModule-createModuleSpecifications}{\code{CharacterizationModule$createModuleSpecifications()}} +\item \href{#method-CharacterizationModule-clone}{\code{CharacterizationModule$clone()}} +} +} +\if{html}{\out{ +
Inherited methods + +
+}} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CharacterizationModule-new}{}}} +\subsection{Method \code{new()}}{ +Initialize the module +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CharacterizationModule$new()}\if{html}{\out{
}} +} + +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CharacterizationModule-execute}{}}} +\subsection{Method \code{execute()}}{ +Execute characterization +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CharacterizationModule$execute( + connectionDetails, + analysisSpecifications, + executionSettings +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{connectionDetails}}{An object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{analysisSpecifications}}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{\code{analysisSpecifications}}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{\code{executionSettings}}{An object of type \code{ExecutionSettings} as created +by \code{\link[=createCdmExecutionSettings]{createCdmExecutionSettings()}} or \code{\link[=createResultsExecutionSettings]{createResultsExecutionSettings()}}.} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CharacterizationModule-createResultsDataModel}{}}} +\subsection{Method \code{createResultsDataModel()}}{ +Create the results data model for the module +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CharacterizationModule$createResultsDataModel( + resultsConnectionDetails, + resultsDatabaseSchema, + tablePrefix = self$tablePrefix +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{resultsConnectionDetails}}{The connection details to the results database which +is an object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{resultsConnectionDetails}}{The connection details to the results database which +is an object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{resultsDatabaseSchema}}{The schema in the results database that holds the results data model.} + +\item{\code{tablePrefix}}{A prefix to apply to the database table names (optional).} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CharacterizationModule-uploadResults}{}}} +\subsection{Method \code{uploadResults()}}{ +Upload the results for the module +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CharacterizationModule$uploadResults( + resultsConnectionDetails, + analysisSpecifications, + resultsDataModelSettings +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{resultsConnectionDetails}}{The connection details to the results database which +is an object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{resultsConnectionDetails}}{The connection details to the results database which +is an object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{analysisSpecifications}}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{\code{analysisSpecifications}}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{\code{resultsDataModelSettings}}{The results data model settings as created using [@seealso \code{\link[=createResultsDataModelSettings]{createResultsDataModelSettings()}}]} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CharacterizationModule-createModuleSpecifications}{}}} +\subsection{Method \code{createModuleSpecifications()}}{ +Creates the CharacterizationModule Specifications +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CharacterizationModule$createModuleSpecifications( + targetIds, + outcomeIds, + dechallengeStopInterval = 30, + dechallengeEvaluationWindow = 30, + timeAtRisk = data.frame(riskWindowStart = c(1, 1), startAnchor = c("cohort start", + "cohort start"), riskWindowEnd = c(0, 365), endAnchor = c("cohort end", + "cohort end")), + minPriorObservation = 0, + minCharacterizationMean = 0, + covariateSettings = FeatureExtraction::createDefaultCovariateSettings() +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{targetIds}}{A vector of cohort IDs to use as the target(s) for the characterization} + +\item{\code{outcomeIds}}{A vector of cohort IDs to use as the outcome(s) for the characterization} + +\item{\code{dechallengeStopInterval}}{description} + +\item{\code{dechallengeEvaluationWindow}}{description} + +\item{\code{timeAtRisk}}{description} + +\item{\code{minPriorObservation}}{description} + +\item{\code{minCharacterizationMean}}{description} + +\item{\code{covariateSettings}}{description} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CharacterizationModule-clone}{}}} +\subsection{Method \code{clone()}}{ +The objects of this class are cloneable with this method. +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CharacterizationModule$clone(deep = FALSE)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{deep}}{Whether to make a deep clone.} +} +\if{html}{\out{
}} +} +} +} diff --git a/man/CohortDiagnosticsModule.Rd b/man/CohortDiagnosticsModule.Rd new file mode 100644 index 00000000..75d5bef9 --- /dev/null +++ b/man/CohortDiagnosticsModule.Rd @@ -0,0 +1,247 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/Module-CohortDiagnostics.R +\name{CohortDiagnosticsModule} +\alias{CohortDiagnosticsModule} +\title{Module for the development and evaluation of phenotype algorithms} +\description{ +Module for the development and evaluation of phenotype algorithms +against the OMOP Common Data Model. +} +\section{Super class}{ +\code{\link[Strategus:StrategusModule]{Strategus::StrategusModule}} -> \code{CohortDiagnosticsModule} +} +\section{Public fields}{ +\if{html}{\out{
}} +\describe{ +\item{\code{tablePrefix}}{The table prefix to append to results tables} +} +\if{html}{\out{
}} +} +\section{Methods}{ +\subsection{Public methods}{ +\itemize{ +\item \href{#method-CohortDiagnosticsModule-new}{\code{CohortDiagnosticsModule$new()}} +\item \href{#method-CohortDiagnosticsModule-execute}{\code{CohortDiagnosticsModule$execute()}} +\item \href{#method-CohortDiagnosticsModule-createResultsDataModel}{\code{CohortDiagnosticsModule$createResultsDataModel()}} +\item \href{#method-CohortDiagnosticsModule-uploadResults}{\code{CohortDiagnosticsModule$uploadResults()}} +\item \href{#method-CohortDiagnosticsModule-createModuleSpecifications}{\code{CohortDiagnosticsModule$createModuleSpecifications()}} +\item \href{#method-CohortDiagnosticsModule-validateModuleSpecifications}{\code{CohortDiagnosticsModule$validateModuleSpecifications()}} +\item \href{#method-CohortDiagnosticsModule-clone}{\code{CohortDiagnosticsModule$clone()}} +} +} +\if{html}{\out{ +
Inherited methods + +
+}} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortDiagnosticsModule-new}{}}} +\subsection{Method \code{new()}}{ +Initialize the module +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortDiagnosticsModule$new()}\if{html}{\out{
}} +} + +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortDiagnosticsModule-execute}{}}} +\subsection{Method \code{execute()}}{ +Executes the CohortDiagnostics package +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortDiagnosticsModule$execute( + connectionDetails, + analysisSpecifications, + executionSettings +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{connectionDetails}}{An object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{analysisSpecifications}}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{\code{analysisSpecifications}}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{\code{executionSettings}}{An object of type \code{ExecutionSettings} as created +by \code{\link[=createCdmExecutionSettings]{createCdmExecutionSettings()}} or \code{\link[=createResultsExecutionSettings]{createResultsExecutionSettings()}}.} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortDiagnosticsModule-createResultsDataModel}{}}} +\subsection{Method \code{createResultsDataModel()}}{ +Create the results data model for the module +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortDiagnosticsModule$createResultsDataModel( + resultsConnectionDetails, + resultsDatabaseSchema, + tablePrefix = self$tablePrefix +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{resultsConnectionDetails}}{The connection details to the results database which +is an object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{resultsConnectionDetails}}{The connection details to the results database which +is an object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{resultsDatabaseSchema}}{The schema in the results database that holds the results data model.} + +\item{\code{tablePrefix}}{A prefix to apply to the database table names (optional).} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortDiagnosticsModule-uploadResults}{}}} +\subsection{Method \code{uploadResults()}}{ +Upload the results for the module +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortDiagnosticsModule$uploadResults( + resultsConnectionDetails, + analysisSpecifications, + resultsDataModelSettings +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{resultsConnectionDetails}}{The connection details to the results database which +is an object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{resultsConnectionDetails}}{The connection details to the results database which +is an object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{analysisSpecifications}}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{\code{analysisSpecifications}}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{\code{resultsDataModelSettings}}{The results data model settings as created using [@seealso \code{\link[=createResultsDataModelSettings]{createResultsDataModelSettings()}}]} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortDiagnosticsModule-createModuleSpecifications}{}}} +\subsection{Method \code{createModuleSpecifications()}}{ +Creates the CohortDiagnostics Module Specifications +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortDiagnosticsModule$createModuleSpecifications( + cohortIds = NULL, + runInclusionStatistics = TRUE, + runIncludedSourceConcepts = TRUE, + runOrphanConcepts = TRUE, + runTimeSeries = FALSE, + runVisitContext = TRUE, + runBreakdownIndexEvents = TRUE, + runIncidenceRate = TRUE, + runCohortRelationship = TRUE, + runTemporalCohortCharacterization = TRUE, + temporalCovariateSettings = private$.getDefaultCovariateSettings(), + minCharacterizationMean = 0.01, + irWashoutPeriod = 0, + incremental = FALSE +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{cohortIds}}{A list of cohort IDs to use when running the CohortDiagnostics. +Default is NULL which will use all cohorts present in the cohort definition set +in the analysis specification} + +\item{\code{runInclusionStatistics}}{Generate and export statistic on the cohort inclusion rules?} + +\item{\code{runIncludedSourceConcepts}}{Generate and export the source concepts included in the cohorts?} + +\item{\code{runOrphanConcepts}}{Generate and export potential orphan concepts?} + +\item{\code{runTimeSeries}}{Generate and export the time series diagnostics?} + +\item{\code{runVisitContext}}{Generate and export index-date visit context?} + +\item{\code{runBreakdownIndexEvents}}{Generate and export the breakdown of index events?} + +\item{\code{runIncidenceRate}}{Generate and export the cohort incidence rates?} + +\item{\code{runCohortRelationship}}{Generate and export the cohort relationship? Cohort relationship checks the temporal +relationship between two or more cohorts.} + +\item{\code{runTemporalCohortCharacterization}}{Generate and export the temporal cohort characterization? +Only records with values greater than 0.001 are returned.} + +\item{\code{temporalCovariateSettings}}{Either an object of type \code{covariateSettings} as created using one of +the createTemporalCovariateSettings function in the FeatureExtraction package, or a list +of such objects.} + +\item{\code{minCharacterizationMean}}{The minimum mean value for characterization output. Values below this will be cut off from output. This +will help reduce the file size of the characterization output, but will remove information +on covariates that have very low values. The default is 0.01 (i.e. 1 percent)} + +\item{\code{irWashoutPeriod}}{Number of days washout to include in calculation of incidence rates - default is 0} + +\item{\code{incremental}}{Create only cohort diagnostics that haven't been created before?} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortDiagnosticsModule-validateModuleSpecifications}{}}} +\subsection{Method \code{validateModuleSpecifications()}}{ +Validate the module specifications +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortDiagnosticsModule$validateModuleSpecifications(moduleSpecifications)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{moduleSpecifications}}{The CohortDiagnostics module specifications} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortDiagnosticsModule-clone}{}}} +\subsection{Method \code{clone()}}{ +The objects of this class are cloneable with this method. +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortDiagnosticsModule$clone(deep = FALSE)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{deep}}{Whether to make a deep clone.} +} +\if{html}{\out{
}} +} +} +} diff --git a/man/CohortGeneratorModule.Rd b/man/CohortGeneratorModule.Rd new file mode 100644 index 00000000..0fc82375 --- /dev/null +++ b/man/CohortGeneratorModule.Rd @@ -0,0 +1,303 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/Module-CohortGenerator.R +\name{CohortGeneratorModule} +\alias{CohortGeneratorModule} +\title{Module for generating cohorts against an OMOP CDM} +\description{ +Generates cohorts against the OMOP CDM +} +\section{Super class}{ +\code{\link[Strategus:StrategusModule]{Strategus::StrategusModule}} -> \code{CohortGeneratorModule} +} +\section{Public fields}{ +\if{html}{\out{
}} +\describe{ +\item{\code{cohortDefinitionSharedResourcesClassName}}{A constant for the name +of the cohort definition shared resources section of the analysis +specification} + +\item{\code{negativeControlOutcomeSharedResourcesClassName}}{A constant for the +name of the negative control outcome shared resources section of the +analysis specification} +} +\if{html}{\out{
}} +} +\section{Methods}{ +\subsection{Public methods}{ +\itemize{ +\item \href{#method-CohortGeneratorModule-new}{\code{CohortGeneratorModule$new()}} +\item \href{#method-CohortGeneratorModule-execute}{\code{CohortGeneratorModule$execute()}} +\item \href{#method-CohortGeneratorModule-createResultsDataModel}{\code{CohortGeneratorModule$createResultsDataModel()}} +\item \href{#method-CohortGeneratorModule-uploadResults}{\code{CohortGeneratorModule$uploadResults()}} +\item \href{#method-CohortGeneratorModule-createModuleSpecifications}{\code{CohortGeneratorModule$createModuleSpecifications()}} +\item \href{#method-CohortGeneratorModule-createCohortSharedResourceSpecifications}{\code{CohortGeneratorModule$createCohortSharedResourceSpecifications()}} +\item \href{#method-CohortGeneratorModule-createNegativeControlOutcomeCohortSharedResourceSpecifications}{\code{CohortGeneratorModule$createNegativeControlOutcomeCohortSharedResourceSpecifications()}} +\item \href{#method-CohortGeneratorModule-validateModuleSpecifications}{\code{CohortGeneratorModule$validateModuleSpecifications()}} +\item \href{#method-CohortGeneratorModule-validateCohortSharedResourceSpecifications}{\code{CohortGeneratorModule$validateCohortSharedResourceSpecifications()}} +\item \href{#method-CohortGeneratorModule-validateNegativeControlOutcomeCohortSharedResourceSpecifications}{\code{CohortGeneratorModule$validateNegativeControlOutcomeCohortSharedResourceSpecifications()}} +\item \href{#method-CohortGeneratorModule-clone}{\code{CohortGeneratorModule$clone()}} +} +} +\if{html}{\out{ +
Inherited methods + +
+}} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortGeneratorModule-new}{}}} +\subsection{Method \code{new()}}{ +Initialize the module +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortGeneratorModule$new()}\if{html}{\out{
}} +} + +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortGeneratorModule-execute}{}}} +\subsection{Method \code{execute()}}{ +Generates the cohorts +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortGeneratorModule$execute( + connectionDetails, + analysisSpecifications, + executionSettings +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{connectionDetails}}{An object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{analysisSpecifications}}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{\code{analysisSpecifications}}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{\code{executionSettings}}{An object of type \code{ExecutionSettings} as created +by \code{\link[=createCdmExecutionSettings]{createCdmExecutionSettings()}} or \code{\link[=createResultsExecutionSettings]{createResultsExecutionSettings()}}.} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortGeneratorModule-createResultsDataModel}{}}} +\subsection{Method \code{createResultsDataModel()}}{ +Create the results data model for the module +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortGeneratorModule$createResultsDataModel( + resultsConnectionDetails, + resultsDatabaseSchema, + tablePrefix = "" +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{resultsConnectionDetails}}{The connection details to the results database which +is an object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{resultsConnectionDetails}}{The connection details to the results database which +is an object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{resultsDatabaseSchema}}{The schema in the results database that holds the results data model.} + +\item{\code{tablePrefix}}{A prefix to apply to the database table names (optional).} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortGeneratorModule-uploadResults}{}}} +\subsection{Method \code{uploadResults()}}{ +Upload the results for the module +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortGeneratorModule$uploadResults( + resultsConnectionDetails, + analysisSpecifications, + resultsDataModelSettings +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{resultsConnectionDetails}}{The connection details to the results database which +is an object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{resultsConnectionDetails}}{The connection details to the results database which +is an object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{analysisSpecifications}}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{\code{analysisSpecifications}}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{\code{resultsDataModelSettings}}{The results data model settings as created using [@seealso \code{\link[=createResultsDataModelSettings]{createResultsDataModelSettings()}}]} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortGeneratorModule-createModuleSpecifications}{}}} +\subsection{Method \code{createModuleSpecifications()}}{ +Creates the CohortGenerator Module Specifications +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortGeneratorModule$createModuleSpecifications( + incremental = TRUE, + generateStats = TRUE +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{incremental}}{When TRUE, the module will keep track of the cohorts +generated so that subsequent runs will skip any previously generated +cohorts.} + +\item{\code{generateStats}}{When TRUE, the Circe cohort definition SQL will +include steps to compute inclusion rule statistics.} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortGeneratorModule-createCohortSharedResourceSpecifications}{}}} +\subsection{Method \code{createCohortSharedResourceSpecifications()}}{ +Create shared specifications for the cohort definition set +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortGeneratorModule$createCohortSharedResourceSpecifications( + cohortDefinitionSet +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{cohortDefinitionSet}}{The cohort definition set to include in the +specification. See the CohortGenerator package for details on how to +build this object.} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortGeneratorModule-createNegativeControlOutcomeCohortSharedResourceSpecifications}{}}} +\subsection{Method \code{createNegativeControlOutcomeCohortSharedResourceSpecifications()}}{ +Create shared specifications for the negative control outcomes cohort set +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortGeneratorModule$createNegativeControlOutcomeCohortSharedResourceSpecifications( + negativeControlOutcomeCohortSet, + occurrenceType, + detectOnDescendants +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{negativeControlOutcomeCohortSet}}{The negative control outcome cohort +definition set defines the concepts to use to construct negative control +outcome cohorts. See the CohortGenerator package for more details.} + +\item{\code{occurrenceType}}{Either "first" or "all"} + +\item{\code{detectOnDescendants}}{When TRUE, the concept ID for the negative +control will use the \code{concept_ancestor} table and will detect +descendant concepts when constructing the cohort.} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortGeneratorModule-validateModuleSpecifications}{}}} +\subsection{Method \code{validateModuleSpecifications()}}{ +Validate the module specifications +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortGeneratorModule$validateModuleSpecifications(moduleSpecifications)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{moduleSpecifications}}{The CohortGenerator module specifications} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortGeneratorModule-validateCohortSharedResourceSpecifications}{}}} +\subsection{Method \code{validateCohortSharedResourceSpecifications()}}{ +Validate the cohort shared resource specifications +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortGeneratorModule$validateCohortSharedResourceSpecifications( + cohortSharedResourceSpecifications +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{cohortSharedResourceSpecifications}}{The cohort shared resource specifications} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortGeneratorModule-validateNegativeControlOutcomeCohortSharedResourceSpecifications}{}}} +\subsection{Method \code{validateNegativeControlOutcomeCohortSharedResourceSpecifications()}}{ +Validate the cohort shared resource specifications +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortGeneratorModule$validateNegativeControlOutcomeCohortSharedResourceSpecifications( + negativeControlOutcomeCohortSharedResourceSpecifications +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{negativeControlOutcomeCohortSharedResourceSpecifications}}{The cohort shared resource specifications} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortGeneratorModule-clone}{}}} +\subsection{Method \code{clone()}}{ +The objects of this class are cloneable with this method. +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortGeneratorModule$clone(deep = FALSE)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{deep}}{Whether to make a deep clone.} +} +\if{html}{\out{
}} +} +} +} diff --git a/man/CohortIncidenceModule.Rd b/man/CohortIncidenceModule.Rd new file mode 100644 index 00000000..1b2608ae --- /dev/null +++ b/man/CohortIncidenceModule.Rd @@ -0,0 +1,191 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/Module-CohortIncidence.R +\name{CohortIncidenceModule} +\alias{CohortIncidenceModule} +\title{Module for computing incidence rates for cohorts against an OMOP CDM} +\description{ +Computes incidence rates for cohorts against the OMOP CDM +} +\section{Super class}{ +\code{\link[Strategus:StrategusModule]{Strategus::StrategusModule}} -> \code{CohortIncidenceModule} +} +\section{Methods}{ +\subsection{Public methods}{ +\itemize{ +\item \href{#method-CohortIncidenceModule-new}{\code{CohortIncidenceModule$new()}} +\item \href{#method-CohortIncidenceModule-execute}{\code{CohortIncidenceModule$execute()}} +\item \href{#method-CohortIncidenceModule-createResultsDataModel}{\code{CohortIncidenceModule$createResultsDataModel()}} +\item \href{#method-CohortIncidenceModule-uploadResults}{\code{CohortIncidenceModule$uploadResults()}} +\item \href{#method-CohortIncidenceModule-createModuleSpecifications}{\code{CohortIncidenceModule$createModuleSpecifications()}} +\item \href{#method-CohortIncidenceModule-validateModuleSpecifications}{\code{CohortIncidenceModule$validateModuleSpecifications()}} +\item \href{#method-CohortIncidenceModule-clone}{\code{CohortIncidenceModule$clone()}} +} +} +\if{html}{\out{ +
Inherited methods + +
+}} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortIncidenceModule-new}{}}} +\subsection{Method \code{new()}}{ +Initialize the module +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortIncidenceModule$new()}\if{html}{\out{
}} +} + +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortIncidenceModule-execute}{}}} +\subsection{Method \code{execute()}}{ +Execute the CohortIncidence package +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortIncidenceModule$execute( + connectionDetails, + analysisSpecifications, + executionSettings +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{connectionDetails}}{An object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{analysisSpecifications}}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{\code{analysisSpecifications}}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{\code{executionSettings}}{An object of type \code{ExecutionSettings} as created +by \code{\link[=createCdmExecutionSettings]{createCdmExecutionSettings()}} or \code{\link[=createResultsExecutionSettings]{createResultsExecutionSettings()}}.} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortIncidenceModule-createResultsDataModel}{}}} +\subsection{Method \code{createResultsDataModel()}}{ +Create the results data model for the module +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortIncidenceModule$createResultsDataModel( + resultsConnectionDetails, + resultsDatabaseSchema, + tablePrefix = "" +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{resultsConnectionDetails}}{The connection details to the results database which +is an object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{resultsConnectionDetails}}{The connection details to the results database which +is an object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{resultsDatabaseSchema}}{The schema in the results database that holds the results data model.} + +\item{\code{tablePrefix}}{A prefix to apply to the database table names (optional).} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortIncidenceModule-uploadResults}{}}} +\subsection{Method \code{uploadResults()}}{ +Upload the results for the module +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortIncidenceModule$uploadResults( + resultsConnectionDetails, + analysisSpecifications, + resultsDataModelSettings +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{resultsConnectionDetails}}{The connection details to the results database which +is an object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{resultsConnectionDetails}}{The connection details to the results database which +is an object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{analysisSpecifications}}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{\code{analysisSpecifications}}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{\code{resultsDataModelSettings}}{The results data model settings as created using [@seealso \code{\link[=createResultsDataModelSettings]{createResultsDataModelSettings()}}]} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortIncidenceModule-createModuleSpecifications}{}}} +\subsection{Method \code{createModuleSpecifications()}}{ +Creates the CohortIncidence Module Specifications +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortIncidenceModule$createModuleSpecifications(irDesign = NULL)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{irDesign}}{The incidence rate design created from the CohortIncidence +package} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortIncidenceModule-validateModuleSpecifications}{}}} +\subsection{Method \code{validateModuleSpecifications()}}{ +Validate the module specifications +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortIncidenceModule$validateModuleSpecifications(moduleSpecifications)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{moduleSpecifications}}{The CohortIncidence module specifications} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortIncidenceModule-clone}{}}} +\subsection{Method \code{clone()}}{ +The objects of this class are cloneable with this method. +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortIncidenceModule$clone(deep = FALSE)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{deep}}{Whether to make a deep clone.} +} +\if{html}{\out{
}} +} +} +} diff --git a/man/CohortMethodModule.Rd b/man/CohortMethodModule.Rd new file mode 100644 index 00000000..80f98c1c --- /dev/null +++ b/man/CohortMethodModule.Rd @@ -0,0 +1,232 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/Module-CohortMethod.R +\name{CohortMethodModule} +\alias{CohortMethodModule} +\title{Module for performing new-user cohort studies} +\description{ +Module for performing new-user cohort studies in an observational +database in the OMOP Common Data Model. +} +\section{Super class}{ +\code{\link[Strategus:StrategusModule]{Strategus::StrategusModule}} -> \code{CohortMethodModule} +} +\section{Methods}{ +\subsection{Public methods}{ +\itemize{ +\item \href{#method-CohortMethodModule-new}{\code{CohortMethodModule$new()}} +\item \href{#method-CohortMethodModule-execute}{\code{CohortMethodModule$execute()}} +\item \href{#method-CohortMethodModule-createResultsDataModel}{\code{CohortMethodModule$createResultsDataModel()}} +\item \href{#method-CohortMethodModule-uploadResults}{\code{CohortMethodModule$uploadResults()}} +\item \href{#method-CohortMethodModule-createModuleSpecifications}{\code{CohortMethodModule$createModuleSpecifications()}} +\item \href{#method-CohortMethodModule-validateModuleSpecifications}{\code{CohortMethodModule$validateModuleSpecifications()}} +\item \href{#method-CohortMethodModule-clone}{\code{CohortMethodModule$clone()}} +} +} +\if{html}{\out{ +
Inherited methods + +
+}} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortMethodModule-new}{}}} +\subsection{Method \code{new()}}{ +Initialize the module +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortMethodModule$new()}\if{html}{\out{
}} +} + +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortMethodModule-execute}{}}} +\subsection{Method \code{execute()}}{ +Executes the CohortMethod package +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortMethodModule$execute( + connectionDetails, + analysisSpecifications, + executionSettings +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{connectionDetails}}{An object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{analysisSpecifications}}{The analysis specifications for the study} + +\item{\code{executionSettings}}{An object of type \code{ExecutionSettings} as created +by \code{\link[=createCdmExecutionSettings]{createCdmExecutionSettings()}} or \code{\link[=createResultsExecutionSettings]{createResultsExecutionSettings()}}.} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortMethodModule-createResultsDataModel}{}}} +\subsection{Method \code{createResultsDataModel()}}{ +Create the results data model for the module +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortMethodModule$createResultsDataModel( + resultsConnectionDetails, + resultsDatabaseSchema, + tablePrefix = "" +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{resultsConnectionDetails}}{The connection details to the results database which +is an object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{resultsDatabaseSchema}}{The schema in the results database that holds the results data model.} + +\item{\code{tablePrefix}}{A prefix to apply to the database table names (optional).} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortMethodModule-uploadResults}{}}} +\subsection{Method \code{uploadResults()}}{ +Upload the results for the module +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortMethodModule$uploadResults( + resultsConnectionDetails, + analysisSpecifications, + resultsDataModelSettings +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{resultsConnectionDetails}}{The connection details to the results database which +is an object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{analysisSpecifications}}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{\code{resultsDataModelSettings}}{The results data model settings as created using \code{\link[=createResultsDataModelSettings]{createResultsDataModelSettings()}}.} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortMethodModule-createModuleSpecifications}{}}} +\subsection{Method \code{createModuleSpecifications()}}{ +Creates the CohortMethod Module Specifications +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortMethodModule$createModuleSpecifications( + cmAnalysisList, + targetComparatorOutcomesList, + analysesToExclude = NULL, + refitPsForEveryOutcome = FALSE, + refitPsForEveryStudyPopulation = TRUE, + cmDiagnosticThresholds = CohortMethod::createCmDiagnosticThresholds() +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{cmAnalysisList}}{A list of objects of type \code{cmAnalysis} as created using +the `\link[CohortMethod:createCmAnalysis]{CohortMethod::createCmAnalysis} function.} + +\item{\code{targetComparatorOutcomesList}}{A list of objects of type \code{targetComparatorOutcomes} as +created using the \link[CohortMethod:createTargetComparatorOutcomes]{CohortMethod::createTargetComparatorOutcomes} +function.} + +\item{\code{analysesToExclude}}{Analyses to exclude. See the Analyses to Exclude section for details.} + +\item{\code{refitPsForEveryOutcome}}{Should the propensity model be fitted for every outcome (i.e. +after people who already had the outcome are removed)? If +false, a single propensity model will be fitted, and people +who had the outcome previously will be removed afterwards.} + +\item{\code{refitPsForEveryStudyPopulation}}{Should the propensity model be fitted for every study population +definition? If false, a single propensity model will be fitted, +and the study population criteria will be applied afterwards.} + +\item{\code{cmDiagnosticThresholds}}{An object of type \code{CmDiagnosticThresholds} as created using +\code{\link[CohortMethod:createCmDiagnosticThresholds]{CohortMethod::createCmDiagnosticThresholds()}}.} +} +\if{html}{\out{
}} +} +\subsection{Details}{ +Run a list of analyses for the target-comparator-outcomes of interest. This function will run all +specified analyses against all hypotheses of interest, meaning that the total number of outcome +models is \code{length(cmAnalysisList) * length(targetComparatorOutcomesList)} (if all analyses specify an +outcome model should be fitted). When you provide several analyses it will determine whether any of +the analyses have anything in common, and will take advantage of this fact. For example, if we +specify several analyses that only differ in the way the outcome model is fitted, then this +function will extract the data and fit the propensity model only once, and re-use this in all the +analysis. + +After completion, a tibble containing references to all generated files can be obtained using the +\code{\link[CohortMethod:getFileReference]{CohortMethod::getFileReference()}} function. A summary of the analysis results can be obtained using the +\code{\link[CohortMethod:getResultsSummary]{CohortMethod::getResultsSummary()}} function. +\subsection{Analyses to Exclude}{ + +Normally, \code{runCmAnalyses} will run all combinations of target-comparator-outcome-analyses settings. +However, sometimes we may not need all those combinations. Using the \code{analysesToExclude} argument, +we can remove certain items from the full matrix. This argument should be a data frame with at least +one of the following columns: +} +} + +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortMethodModule-validateModuleSpecifications}{}}} +\subsection{Method \code{validateModuleSpecifications()}}{ +Validate the module specifications +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortMethodModule$validateModuleSpecifications(moduleSpecifications)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{moduleSpecifications}}{The CohortMethod module specifications} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-CohortMethodModule-clone}{}}} +\subsection{Method \code{clone()}}{ +The objects of this class are cloneable with this method. +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{CohortMethodModule$clone(deep = FALSE)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{deep}}{Whether to make a deep clone.} +} +\if{html}{\out{
}} +} +} +} diff --git a/man/EvidenceSynthesisModule.Rd b/man/EvidenceSynthesisModule.Rd new file mode 100644 index 00000000..020c99e9 --- /dev/null +++ b/man/EvidenceSynthesisModule.Rd @@ -0,0 +1,402 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/Module-EvidenceSynthesis.R +\name{EvidenceSynthesisModule} +\alias{EvidenceSynthesisModule} +\title{Module for combining causal effect estimates and study diagnostics +across multiple data sites in a distributed study. This includes functions +for performing meta-analysis and forest plots} +\description{ +Module for combining causal effect estimates and study diagnostics +across multiple data sites in a distributed study. This includes functions +for performing meta-analysis and forest plots +} +\section{Super class}{ +\code{\link[Strategus:StrategusModule]{Strategus::StrategusModule}} -> \code{EvidenceSynthesisModule} +} +\section{Methods}{ +\subsection{Public methods}{ +\itemize{ +\item \href{#method-EvidenceSynthesisModule-new}{\code{EvidenceSynthesisModule$new()}} +\item \href{#method-EvidenceSynthesisModule-execute}{\code{EvidenceSynthesisModule$execute()}} +\item \href{#method-EvidenceSynthesisModule-createResultsDataModel}{\code{EvidenceSynthesisModule$createResultsDataModel()}} +\item \href{#method-EvidenceSynthesisModule-uploadResults}{\code{EvidenceSynthesisModule$uploadResults()}} +\item \href{#method-EvidenceSynthesisModule-validateModuleSpecifications}{\code{EvidenceSynthesisModule$validateModuleSpecifications()}} +\item \href{#method-EvidenceSynthesisModule-createEvidenceSynthesisSource}{\code{EvidenceSynthesisModule$createEvidenceSynthesisSource()}} +\item \href{#method-EvidenceSynthesisModule-createRandomEffectsMetaAnalysis}{\code{EvidenceSynthesisModule$createRandomEffectsMetaAnalysis()}} +\item \href{#method-EvidenceSynthesisModule-createFixedEffectsMetaAnalysis}{\code{EvidenceSynthesisModule$createFixedEffectsMetaAnalysis()}} +\item 
\href{#method-EvidenceSynthesisModule-createBayesianMetaAnalysis}{\code{EvidenceSynthesisModule$createBayesianMetaAnalysis()}} +\item \href{#method-EvidenceSynthesisModule-createEsDiagnosticThresholds}{\code{EvidenceSynthesisModule$createEsDiagnosticThresholds()}} +\item \href{#method-EvidenceSynthesisModule-createModuleSpecifications}{\code{EvidenceSynthesisModule$createModuleSpecifications()}} +\item \href{#method-EvidenceSynthesisModule-clone}{\code{EvidenceSynthesisModule$clone()}} +} +} +\if{html}{\out{ +
Inherited methods + +
+}} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-EvidenceSynthesisModule-new}{}}} +\subsection{Method \code{new()}}{ +Initialize the module +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{EvidenceSynthesisModule$new()}\if{html}{\out{
}} +} + +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-EvidenceSynthesisModule-execute}{}}} +\subsection{Method \code{execute()}}{ +Executes the EvidenceSynthesis package +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{EvidenceSynthesisModule$execute( + connectionDetails, + analysisSpecifications, + executionSettings +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{connectionDetails}}{An object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{analysisSpecifications}}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{\code{executionSettings}}{An object of type \code{ExecutionSettings} as created +by \code{\link[=createCdmExecutionSettings]{createCdmExecutionSettings()}} or \code{\link[=createResultsExecutionSettings]{createResultsExecutionSettings()}}.} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-EvidenceSynthesisModule-createResultsDataModel}{}}} +\subsection{Method \code{createResultsDataModel()}}{ +Create the results data model for the module +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{EvidenceSynthesisModule$createResultsDataModel( + resultsConnectionDetails, + resultsDatabaseSchema, + tablePrefix = "" +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{resultsConnectionDetails}}{The connection details to the results database which +is an object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{resultsDatabaseSchema}}{The schema in the results database that holds the results data model.} + +\item{\code{tablePrefix}}{A prefix to apply to the database table names (optional).} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-EvidenceSynthesisModule-uploadResults}{}}} +\subsection{Method \code{uploadResults()}}{ +Upload the results for the module +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{EvidenceSynthesisModule$uploadResults( + resultsConnectionDetails, + analysisSpecifications, + resultsDataModelSettings +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{resultsConnectionDetails}}{The connection details to the results database which +is an object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{analysisSpecifications}}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{\code{resultsDataModelSettings}}{The results data model settings as created using \code{\link[=createResultsDataModelSettings]{createResultsDataModelSettings()}}.} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-EvidenceSynthesisModule-validateModuleSpecifications}{}}} +\subsection{Method \code{validateModuleSpecifications()}}{ +Validate the module specifications +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{EvidenceSynthesisModule$validateModuleSpecifications(moduleSpecifications)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{moduleSpecifications}}{The EvidenceSynthesis module specifications +Create an evidence synthesis source} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-EvidenceSynthesisModule-createEvidenceSynthesisSource}{}}} +\subsection{Method \code{createEvidenceSynthesisSource()}}{ +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{EvidenceSynthesisModule$createEvidenceSynthesisSource( + sourceMethod = "CohortMethod", + databaseIds = NULL, + analysisIds = NULL, + likelihoodApproximation = "adaptive grid" +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{sourceMethod}}{The source method generating the estimates to synthesize. Can be "CohortMethod" or +"SelfControlledCaseSeries"} + +\item{\code{databaseIds}}{The database IDs to include. Use \code{databaseIds = NULL} to include all database IDs.} + +\item{\code{analysisIds}}{The source method analysis IDs to include. Use \code{analysisIds = NULL} to include all +analysis IDs.} + +\item{\code{likelihoodApproximation}}{The type of likelihood approximation. Can be "adaptive grid" or "normal".} +} +\if{html}{\out{
}} +} +\subsection{Returns}{ +An object of type \code{EvidenceSynthesisSource}. +Create parameters for a random-effects meta-analysis +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-EvidenceSynthesisModule-createRandomEffectsMetaAnalysis}{}}} +\subsection{Method \code{createRandomEffectsMetaAnalysis()}}{ +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{EvidenceSynthesisModule$createRandomEffectsMetaAnalysis( + alpha = 0.05, + evidenceSynthesisAnalysisId = 1, + evidenceSynthesisDescription = "Random-effects", + evidenceSynthesisSource = NULL, + controlType = "outcome" +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{alpha}}{The alpha (expected type I error) used for the confidence intervals.} + +\item{\code{evidenceSynthesisAnalysisId}}{description} + +\item{\code{evidenceSynthesisDescription}}{description} + +\item{\code{evidenceSynthesisSource}}{description} + +\item{\code{controlType}}{description +Create a parameter object for the function computeFixedEffectMetaAnalysis} +} +\if{html}{\out{
}} +} +\subsection{Details}{ +Use DerSimonian-Laird meta-analysis +} + +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-EvidenceSynthesisModule-createFixedEffectsMetaAnalysis}{}}} +\subsection{Method \code{createFixedEffectsMetaAnalysis()}}{ +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{EvidenceSynthesisModule$createFixedEffectsMetaAnalysis( + alpha = 0.05, + evidenceSynthesisAnalysisId = 1, + evidenceSynthesisDescription = "Fixed-effects", + evidenceSynthesisSource = NULL, + controlType = "outcome" +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{alpha}}{The alpha (expected type I error) used for the confidence intervals.} + +\item{\code{evidenceSynthesisAnalysisId}}{description} + +\item{\code{evidenceSynthesisDescription}}{description} + +\item{\code{evidenceSynthesisSource}}{description} + +\item{\code{controlType}}{description +Create a parameter object for the function computeBayesianMetaAnalysis} +} +\if{html}{\out{
}} +} +\subsection{Details}{ +Create an object defining the parameter values. +} + +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-EvidenceSynthesisModule-createBayesianMetaAnalysis}{}}} +\subsection{Method \code{createBayesianMetaAnalysis()}}{ +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{EvidenceSynthesisModule$createBayesianMetaAnalysis( + chainLength = 1100000, + burnIn = 1e+05, + subSampleFrequency = 100, + priorSd = c(2, 0.5), + alpha = 0.05, + robust = FALSE, + df = 4, + seed = 1, + evidenceSynthesisAnalysisId = 1, + evidenceSynthesisDescription = "Bayesian random-effects", + evidenceSynthesisSource = NULL, + controlType = "outcome" +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{chainLength}}{Number of MCMC iterations.} + +\item{\code{burnIn}}{Number of MCMC iterations to consider as burn in.} + +\item{\code{subSampleFrequency}}{Subsample frequency for the MCMC.} + +\item{\code{priorSd}}{A two-dimensional vector with the standard deviation of the prior for mu and tau, respectively.} + +\item{\code{alpha}}{The alpha (expected type I error) used for the credible intervals.} + +\item{\code{robust}}{Whether or not to use a t-distribution model; default: FALSE.} + +\item{\code{df}}{Degrees of freedom for the t-model, only used if robust is TRUE.} + +\item{\code{seed}}{The seed for the random number generator.} + +\item{\code{evidenceSynthesisAnalysisId}}{description} + +\item{\code{evidenceSynthesisDescription}}{description} + +\item{\code{evidenceSynthesisSource}}{description} + +\item{\code{controlType}}{description +Create EvidenceSynthesis diagnostics thresholds} +} +\if{html}{\out{
}} +} +\subsection{Details}{ +Create an object defining the parameter values. +} + +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-EvidenceSynthesisModule-createEsDiagnosticThresholds}{}}} +\subsection{Method \code{createEsDiagnosticThresholds()}}{ +Threshold used to determine if we pass or fail diagnostics. +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{EvidenceSynthesisModule$createEsDiagnosticThresholds( + mdrrThreshold = 10, + easeThreshold = 0.25, + i2Threshold = 0.4, + tauThreshold = log(2) +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{mdrrThreshold}}{What is the maximum allowed minimum detectable relative risk +(MDRR)?} + +\item{\code{easeThreshold}}{What is the maximum allowed expected absolute systematic error +(EASE).} + +\item{\code{i2Threshold}}{What is the maximum allowed I^2 (measure of between-database +heterogeneity in random-effects models)?} + +\item{\code{tauThreshold}}{What is the maximum allowed tau (measure of between-database +heterogeneity in Bayesian random-effects models)?} +} +\if{html}{\out{
}} +} +\subsection{Returns}{ +An object of type \code{EsDiagnosticThresholds}. +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-EvidenceSynthesisModule-createModuleSpecifications}{}}} +\subsection{Method \code{createModuleSpecifications()}}{ +Creates the module Specifications +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{EvidenceSynthesisModule$createModuleSpecifications( + evidenceSynthesisAnalysisList, + esDiagnosticThresholds = self$createEsDiagnosticThresholds() +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{evidenceSynthesisAnalysisList}}{A list of objects of type \code{EvidenceSynthesisAnalysis} as generated +by either the \href{../../Strategus/html/EvidenceSynthesisModule.html#method-createFixedEffectsMetaAnalysis}{\code{EvidenceSynthesisModule$createFixedEffectsMetaAnalysis()}} +or \href{../../Strategus/html/EvidenceSynthesisModule.html#method-createBayesianMetaAnalysis}{\code{EvidenceSynthesisModule$createBayesianMetaAnalysis()}} function.} + +\item{\code{esDiagnosticThresholds}}{An object of type \code{EsDiagnosticThresholds} as generated by the +\href{../../Strategus/html/EvidenceSynthesisModule.html#method-createEsDiagnosticThresholds}{\code{EvidenceSynthesisModule$createEsDiagnosticThresholds()}} function.} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-EvidenceSynthesisModule-clone}{}}} +\subsection{Method \code{clone()}}{ +The objects of this class are cloneable with this method. +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{EvidenceSynthesisModule$clone(deep = FALSE)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{deep}}{Whether to make a deep clone.} +} +\if{html}{\out{
}} +} +} +} diff --git a/man/JobContext.Rd b/man/JobContext.Rd new file mode 100644 index 00000000..66c30bac --- /dev/null +++ b/man/JobContext.Rd @@ -0,0 +1,47 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/Module-StrategusModule.R +\name{JobContext} +\alias{JobContext} +\title{Job context holds the elements of the analysis specification +and execution settings necessary to execute a module.} +\description{ +This is an internal class used by the StrategusModule (and child classes) +execute function +} +\section{Public fields}{ +\if{html}{\out{
}} +\describe{ +\item{\code{sharedResources}}{Shared resources for execution +TODO: Revisit to break this into fields for cohorts, subsets, +negative controls,} + +\item{\code{settings}}{Module settings} + +\item{\code{moduleExecutionSettings}}{Module execution settings} +} +\if{html}{\out{
}} +} +\section{Methods}{ +\subsection{Public methods}{ +\itemize{ +\item \href{#method-JobContext-clone}{\code{JobContext$clone()}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-JobContext-clone}{}}} +\subsection{Method \code{clone()}}{ +The objects of this class are cloneable with this method. +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{JobContext$clone(deep = FALSE)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{deep}}{Whether to make a deep clone.} +} +\if{html}{\out{
}} +} +} +} diff --git a/man/PatientLevelPredictionModule.Rd b/man/PatientLevelPredictionModule.Rd new file mode 100644 index 00000000..368256e1 --- /dev/null +++ b/man/PatientLevelPredictionModule.Rd @@ -0,0 +1,198 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/Module-PatientLevelPrediction.R +\name{PatientLevelPredictionModule} +\alias{PatientLevelPredictionModule} +\title{Module for performing patient-level prediction studies} +\description{ +Module for performing patient-level prediction in an observational +database in the OMOP Common Data Model. +} +\section{Super class}{ +\code{\link[Strategus:StrategusModule]{Strategus::StrategusModule}} -> \code{PatientLevelPredictionModule} +} +\section{Public fields}{ +\if{html}{\out{
}} +\describe{ +\item{\code{tablePrefix}}{The table prefix to append to the results tables} +} +\if{html}{\out{
}} +} +\section{Methods}{ +\subsection{Public methods}{ +\itemize{ +\item \href{#method-PatientLevelPredictionModule-new}{\code{PatientLevelPredictionModule$new()}} +\item \href{#method-PatientLevelPredictionModule-execute}{\code{PatientLevelPredictionModule$execute()}} +\item \href{#method-PatientLevelPredictionModule-createResultsDataModel}{\code{PatientLevelPredictionModule$createResultsDataModel()}} +\item \href{#method-PatientLevelPredictionModule-uploadResults}{\code{PatientLevelPredictionModule$uploadResults()}} +\item \href{#method-PatientLevelPredictionModule-createModuleSpecifications}{\code{PatientLevelPredictionModule$createModuleSpecifications()}} +\item \href{#method-PatientLevelPredictionModule-validateModuleSpecifications}{\code{PatientLevelPredictionModule$validateModuleSpecifications()}} +\item \href{#method-PatientLevelPredictionModule-clone}{\code{PatientLevelPredictionModule$clone()}} +} +} +\if{html}{\out{ +
Inherited methods + +
+}} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-PatientLevelPredictionModule-new}{}}} +\subsection{Method \code{new()}}{ +Initialize the module +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{PatientLevelPredictionModule$new()}\if{html}{\out{
}} +} + +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-PatientLevelPredictionModule-execute}{}}} +\subsection{Method \code{execute()}}{ +Executes the PatientLevelPrediction package +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{PatientLevelPredictionModule$execute( + connectionDetails, + analysisSpecifications, + executionSettings +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{connectionDetails}}{An object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{analysisSpecifications}}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{\code{executionSettings}}{An object of type \code{ExecutionSettings} as created +by \code{\link[=createCdmExecutionSettings]{createCdmExecutionSettings()}} or \code{\link[=createResultsExecutionSettings]{createResultsExecutionSettings()}}.} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-PatientLevelPredictionModule-createResultsDataModel}{}}} +\subsection{Method \code{createResultsDataModel()}}{ +Create the results data model for the module +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{PatientLevelPredictionModule$createResultsDataModel( + resultsConnectionDetails, + resultsDatabaseSchema, + tablePrefix = self$tablePrefix +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{resultsConnectionDetails}}{The connection details to the results database which +is an object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{resultsDatabaseSchema}}{The schema in the results database that holds the results data model.} + +\item{\code{tablePrefix}}{A prefix to apply to the database table names (optional).} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-PatientLevelPredictionModule-uploadResults}{}}} +\subsection{Method \code{uploadResults()}}{ +Upload the results for the module +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{PatientLevelPredictionModule$uploadResults( + resultsConnectionDetails, + analysisSpecifications, + resultsDataModelSettings +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{resultsConnectionDetails}}{The connection details to the results database which +is an object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{analysisSpecifications}}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{\code{resultsDataModelSettings}}{The results data model settings as created using \code{\link[=createResultsDataModelSettings]{createResultsDataModelSettings()}}.} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-PatientLevelPredictionModule-createModuleSpecifications}{}}} +\subsection{Method \code{createModuleSpecifications()}}{ +Creates the PatientLevelPrediction Module Specifications +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{PatientLevelPredictionModule$createModuleSpecifications(modelDesignList)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{modelDesignList}}{A list of model designs to execute with the \code{PatientLevelPrediction} package.} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-PatientLevelPredictionModule-validateModuleSpecifications}{}}} +\subsection{Method \code{validateModuleSpecifications()}}{ +Validate the module specifications +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{PatientLevelPredictionModule$validateModuleSpecifications(moduleSpecifications)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{moduleSpecifications}}{The PatientLevelPrediction module specifications} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-PatientLevelPredictionModule-clone}{}}} +\subsection{Method \code{clone()}}{ +The objects of this class are cloneable with this method. +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{PatientLevelPredictionModule$clone(deep = FALSE)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{deep}}{Whether to make a deep clone.} +} +\if{html}{\out{
}} +} +} +} diff --git a/man/SelfControlledCaseSeriesModule.Rd b/man/SelfControlledCaseSeriesModule.Rd new file mode 100644 index 00000000..38795c4b --- /dev/null +++ b/man/SelfControlledCaseSeriesModule.Rd @@ -0,0 +1,215 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/Module-SelfControlledCaseSeries.R +\name{SelfControlledCaseSeriesModule} +\alias{SelfControlledCaseSeriesModule} +\title{Module for performing Self-Controlled Case Series (SCCS) analyses +in an observational database in the OMOP Common Data Model.} +\description{ +Module for performing Self-Controlled Case Series (SCCS) analyses +in an observational database in the OMOP Common Data Model. +} +\section{Super class}{ +\code{\link[Strategus:StrategusModule]{Strategus::StrategusModule}} -> \code{SelfControlledCaseSeriesModule} +} +\section{Public fields}{ +\if{html}{\out{
}} +\describe{ +\item{\code{tablePrefix}}{The table prefix for results tables} +} +\if{html}{\out{
}} +} +\section{Methods}{ +\subsection{Public methods}{ +\itemize{ +\item \href{#method-SelfControlledCaseSeriesModule-new}{\code{SelfControlledCaseSeriesModule$new()}} +\item \href{#method-SelfControlledCaseSeriesModule-execute}{\code{SelfControlledCaseSeriesModule$execute()}} +\item \href{#method-SelfControlledCaseSeriesModule-createResultsDataModel}{\code{SelfControlledCaseSeriesModule$createResultsDataModel()}} +\item \href{#method-SelfControlledCaseSeriesModule-uploadResults}{\code{SelfControlledCaseSeriesModule$uploadResults()}} +\item \href{#method-SelfControlledCaseSeriesModule-createModuleSpecifications}{\code{SelfControlledCaseSeriesModule$createModuleSpecifications()}} +\item \href{#method-SelfControlledCaseSeriesModule-validateModuleSpecifications}{\code{SelfControlledCaseSeriesModule$validateModuleSpecifications()}} +\item \href{#method-SelfControlledCaseSeriesModule-clone}{\code{SelfControlledCaseSeriesModule$clone()}} +} +} +\if{html}{\out{ +
Inherited methods + +
+}} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-SelfControlledCaseSeriesModule-new}{}}} +\subsection{Method \code{new()}}{ +Initialize the module +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{SelfControlledCaseSeriesModule$new()}\if{html}{\out{
}} +} + +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-SelfControlledCaseSeriesModule-execute}{}}} +\subsection{Method \code{execute()}}{ +Executes the SelfControlledCaseSeries package +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{SelfControlledCaseSeriesModule$execute( + connectionDetails, + analysisSpecifications, + executionSettings +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{connectionDetails}}{An object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{analysisSpecifications}}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{\code{executionSettings}}{An object of type \code{ExecutionSettings} as created +by \code{\link[=createCdmExecutionSettings]{createCdmExecutionSettings()}} or \code{\link[=createResultsExecutionSettings]{createResultsExecutionSettings()}}.} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-SelfControlledCaseSeriesModule-createResultsDataModel}{}}} +\subsection{Method \code{createResultsDataModel()}}{ +Create the results data model for the module +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{SelfControlledCaseSeriesModule$createResultsDataModel( + resultsConnectionDetails, + resultsDatabaseSchema, + tablePrefix = "" +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{resultsConnectionDetails}}{The connection details to the results database which +is an object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{resultsDatabaseSchema}}{The schema in the results database that holds the results data model.} + +\item{\code{tablePrefix}}{A prefix to apply to the database table names (optional).} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-SelfControlledCaseSeriesModule-uploadResults}{}}} +\subsection{Method \code{uploadResults()}}{ +Upload the results for the module +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{SelfControlledCaseSeriesModule$uploadResults( + resultsConnectionDetails, + analysisSpecifications, + resultsDataModelSettings +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{resultsConnectionDetails}}{The connection details to the results database which +is an object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{analysisSpecifications}}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{\code{resultsDataModelSettings}}{The results data model settings as created using \code{\link[=createResultsDataModelSettings]{createResultsDataModelSettings()}}.} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-SelfControlledCaseSeriesModule-createModuleSpecifications}{}}} +\subsection{Method \code{createModuleSpecifications()}}{ +Creates the SelfControlledCaseSeries Module Specifications +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{SelfControlledCaseSeriesModule$createModuleSpecifications( + sccsAnalysisList, + exposuresOutcomeList, + analysesToExclude = NULL, + combineDataFetchAcrossOutcomes = FALSE, + sccsDiagnosticThresholds = SelfControlledCaseSeries::createSccsDiagnosticThresholds() +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{sccsAnalysisList}}{A list of SCCS analysis settings objects.} + +\item{\code{exposuresOutcomeList}}{A list of exposures-outcome sets to analyze.} + +\item{\code{analysesToExclude}}{Analyses to exclude from execution; \code{NULL} (the default) excludes none.} + +\item{\code{combineDataFetchAcrossOutcomes}}{Whether to combine the data fetch across outcomes (default \code{FALSE}).} + +\item{\code{sccsDiagnosticThresholds}}{Diagnostic thresholds, by default as created by \code{SelfControlledCaseSeries::createSccsDiagnosticThresholds()}.} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-SelfControlledCaseSeriesModule-validateModuleSpecifications}{}}} +\subsection{Method \code{validateModuleSpecifications()}}{ +Validate the module specifications +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{SelfControlledCaseSeriesModule$validateModuleSpecifications( + moduleSpecifications +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{moduleSpecifications}}{The SelfControlledCaseSeries module specifications} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-SelfControlledCaseSeriesModule-clone}{}}} +\subsection{Method \code{clone()}}{ +The objects of this class are cloneable with this method. +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{SelfControlledCaseSeriesModule$clone(deep = FALSE)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{deep}}{Whether to make a deep clone.} +} +\if{html}{\out{
}} +} +} +} diff --git a/man/StrategusModule.Rd b/man/StrategusModule.Rd new file mode 100644 index 00000000..4b819059 --- /dev/null +++ b/man/StrategusModule.Rd @@ -0,0 +1,259 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/Module-StrategusModule.R +\name{StrategusModule} +\alias{StrategusModule} +\title{StrategusModule defines the base class for each HADES Strategus module} +\description{ +Provides a base class for HADES Strategus modules to inherit +} +\section{Public fields}{ +\if{html}{\out{
}} +\describe{ +\item{\code{moduleName}}{The name of the module taken from the class name. +This is set in the constructor of the class.} + +\item{\code{moduleClassName}}{The class name that identifies +the module specifications in the overall analysis specification. +This is set in the constructor of the class.} + +\item{\code{internalModuleSpecificationClassName}}{A constant value. +The base class name that identifies a module specification +in the analysis specification.} + +\item{\code{internalSharedResourcesClassName}}{A constant value. The class name +that identifies the shared resources section in the overall analysis +specification.} +} +\if{html}{\out{
}} +} +\section{Methods}{ +\subsection{Public methods}{ +\itemize{ +\item \href{#method-StrategusModule-new}{\code{StrategusModule$new()}} +\item \href{#method-StrategusModule-execute}{\code{StrategusModule$execute()}} +\item \href{#method-StrategusModule-createResultsDataModel}{\code{StrategusModule$createResultsDataModel()}} +\item \href{#method-StrategusModule-uploadResults}{\code{StrategusModule$uploadResults()}} +\item \href{#method-StrategusModule-createModuleSpecifications}{\code{StrategusModule$createModuleSpecifications()}} +\item \href{#method-StrategusModule-createSharedResourcesSpecifications}{\code{StrategusModule$createSharedResourcesSpecifications()}} +\item \href{#method-StrategusModule-validateModuleSpecifications}{\code{StrategusModule$validateModuleSpecifications()}} +\item \href{#method-StrategusModule-validateSharedResourcesSpecifications}{\code{StrategusModule$validateSharedResourcesSpecifications()}} +\item \href{#method-StrategusModule-clone}{\code{StrategusModule$clone()}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-StrategusModule-new}{}}} +\subsection{Method \code{new()}}{ +Initialize the module +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{StrategusModule$new()}\if{html}{\out{
}} +} + +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-StrategusModule-execute}{}}} +\subsection{Method \code{execute()}}{ +Executes the module +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{StrategusModule$execute( + connectionDetails, + analysisSpecifications, + executionSettings +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{connectionDetails}}{An object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{analysisSpecifications}}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{\code{executionSettings}}{An object of type \code{ExecutionSettings} as created +by \code{\link[=createCdmExecutionSettings]{createCdmExecutionSettings()}} or \code{\link[=createResultsExecutionSettings]{createResultsExecutionSettings()}}.} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-StrategusModule-createResultsDataModel}{}}} +\subsection{Method \code{createResultsDataModel()}}{ +Create the results data model for the module +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{StrategusModule$createResultsDataModel( + resultsConnectionDetails, + resultsDatabaseSchema, + tablePrefix = "" +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{resultsConnectionDetails}}{The connection details to the results database which +is an object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{resultsDatabaseSchema}}{The schema in the results database that holds the results data model.} + +\item{\code{tablePrefix}}{A prefix to apply to the database table names (optional).} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-StrategusModule-uploadResults}{}}} +\subsection{Method \code{uploadResults()}}{ +Upload the results for the module +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{StrategusModule$uploadResults( + resultsConnectionDetails, + analysisSpecifications, + resultsDataModelSettings +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{resultsConnectionDetails}}{The connection details to the results database which +is an object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} + +\item{\code{analysisSpecifications}}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{\code{resultsDataModelSettings}}{The results data model settings as created using \code{\link[=createResultsDataModelSettings]{createResultsDataModelSettings()}}.} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-StrategusModule-createModuleSpecifications}{}}} +\subsection{Method \code{createModuleSpecifications()}}{ +Base function for creating the module settings object. +Each module will have its own implementation and this base class method +will be used to ensure the class of the specifications is set properly. +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{StrategusModule$createModuleSpecifications(moduleSpecifications)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{moduleSpecifications}}{An object of type \code{ModuleSpecifications}} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-StrategusModule-createSharedResourcesSpecifications}{}}} +\subsection{Method \code{createSharedResourcesSpecifications()}}{ +Base function for creating the shared resources settings object. +Each module will have its own implementation if it needs to create +a shared resource. +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{StrategusModule$createSharedResourcesSpecifications( + className, + sharedResourcesSpecifications +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{className}}{The class name of the shared resources specifications} + +\item{\code{sharedResourcesSpecifications}}{The shared resources specifications} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-StrategusModule-validateModuleSpecifications}{}}} +\subsection{Method \code{validateModuleSpecifications()}}{ +Base function for validating the module settings object. +Each module will have its own implementation and this base class method +will be used to ensure the module specifications are valid ahead of +execution +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{StrategusModule$validateModuleSpecifications(moduleSpecifications)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{moduleSpecifications}}{An object of type \code{ModuleSpecifications}} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-StrategusModule-validateSharedResourcesSpecifications}{}}} +\subsection{Method \code{validateSharedResourcesSpecifications()}}{ +Base function for validating the shared resources +specification settings object. Each module will have its own +implementation and this base class method will be used to ensure +the module specifications are valid ahead of execution +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{StrategusModule$validateSharedResourcesSpecifications( + className, + sharedResourcesSpecifications +)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{className}}{The class name of the shared resources specifications} + +\item{\code{sharedResourcesSpecifications}}{The shared resources specifications} +} +\if{html}{\out{
}} +} +} +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-StrategusModule-clone}{}}} +\subsection{Method \code{clone()}}{ +The objects of this class are cloneable with this method. +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{StrategusModule$clone(deep = FALSE)}\if{html}{\out{
}} +} + +\subsection{Arguments}{ +\if{html}{\out{
}} +\describe{ +\item{\code{deep}}{Whether to make a deep clone.} +} +\if{html}{\out{
}} +} +} +} diff --git a/man/addCharacterizationModuleSpecifications.Rd b/man/addCharacterizationModuleSpecifications.Rd new file mode 100644 index 00000000..8aad0a60 --- /dev/null +++ b/man/addCharacterizationModuleSpecifications.Rd @@ -0,0 +1,23 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/Settings.R +\name{addCharacterizationModuleSpecifications} +\alias{addCharacterizationModuleSpecifications} +\title{Add Characterization module specifications to analysis specifications} +\usage{ +addCharacterizationModuleSpecifications( + analysisSpecifications, + moduleSpecifications +) +} +\arguments{ +\item{analysisSpecifications}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{moduleSpecifications}{An object of type \code{CharacterizationModule}.} +} +\value{ +Returns the \code{analysisSpecifications} object with the module specifications added. 
+} +\description{ +Add Characterization module specifications to analysis specifications +} diff --git a/man/addCohortDiagnosticsModuleSpecifications.Rd b/man/addCohortDiagnosticsModuleSpecifications.Rd new file mode 100644 index 00000000..6fc0a737 --- /dev/null +++ b/man/addCohortDiagnosticsModuleSpecifications.Rd @@ -0,0 +1,23 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/Settings.R +\name{addCohortDiagnosticsModuleSpecifications} +\alias{addCohortDiagnosticsModuleSpecifications} +\title{Add Cohort Diagnostics module specifications to analysis specifications} +\usage{ +addCohortDiagnosticsModuleSpecifications( + analysisSpecifications, + moduleSpecifications +) +} +\arguments{ +\item{analysisSpecifications}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{moduleSpecifications}{An object of type \code{CohortDiagnosticsModule}.} +} +\value{ +Returns the \code{analysisSpecifications} object with the module specifications added. 
+} +\description{ +Add Cohort Diagnostics module specifications to analysis specifications +} diff --git a/man/addCohortGeneratorModuleSpecifications.Rd b/man/addCohortGeneratorModuleSpecifications.Rd new file mode 100644 index 00000000..3bbc8c92 --- /dev/null +++ b/man/addCohortGeneratorModuleSpecifications.Rd @@ -0,0 +1,23 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/Settings.R +\name{addCohortGeneratorModuleSpecifications} +\alias{addCohortGeneratorModuleSpecifications} +\title{Add Cohort Generator module specifications to analysis specifications} +\usage{ +addCohortGeneratorModuleSpecifications( + analysisSpecifications, + moduleSpecifications +) +} +\arguments{ +\item{analysisSpecifications}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{moduleSpecifications}{An object of type \code{CohortGeneratorModule}.} +} +\value{ +Returns the \code{analysisSpecifications} object with the module specifications added. 
+} +\description{ +Add Cohort Generator module specifications to analysis specifications +} diff --git a/man/addCohortIncidenceModuleSpecifications.Rd b/man/addCohortIncidenceModuleSpecifications.Rd new file mode 100644 index 00000000..9b6e66c4 --- /dev/null +++ b/man/addCohortIncidenceModuleSpecifications.Rd @@ -0,0 +1,23 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/Settings.R +\name{addCohortIncidenceModuleSpecifications} +\alias{addCohortIncidenceModuleSpecifications} +\title{Add Cohort Incidence module specifications to analysis specifications} +\usage{ +addCohortIncidenceModuleSpecifications( + analysisSpecifications, + moduleSpecifications +) +} +\arguments{ +\item{analysisSpecifications}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{moduleSpecifications}{An object of type \code{CohortIncidenceModule}.} +} +\value{ +Returns the \code{analysisSpecifications} object with the module specifications added. 
+} +\description{ +Add Cohort Incidence module specifications to analysis specifications +} diff --git a/man/addCohortMethodeModuleSpecifications.Rd b/man/addCohortMethodeModuleSpecifications.Rd new file mode 100644 index 00000000..5e6cf026 --- /dev/null +++ b/man/addCohortMethodeModuleSpecifications.Rd @@ -0,0 +1,23 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/Settings.R +\name{addCohortMethodeModuleSpecifications} +\alias{addCohortMethodeModuleSpecifications} +\title{Add Cohort Method module specifications to analysis specifications} +\usage{ +addCohortMethodeModuleSpecifications( + analysisSpecifications, + moduleSpecifications +) +} +\arguments{ +\item{analysisSpecifications}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{moduleSpecifications}{An object of type \code{CohortMethodModule}.} +} +\value{ +Returns the \code{analysisSpecifications} object with the module specifications added. 
+} +\description{ +Add Cohort Method module specifications to analysis specifications +} diff --git a/man/addEvidenceSynthesisModuleSpecifications.Rd b/man/addEvidenceSynthesisModuleSpecifications.Rd new file mode 100644 index 00000000..8705516a --- /dev/null +++ b/man/addEvidenceSynthesisModuleSpecifications.Rd @@ -0,0 +1,23 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/Settings.R +\name{addEvidenceSynthesisModuleSpecifications} +\alias{addEvidenceSynthesisModuleSpecifications} +\title{Add Evidence Synthesis module specifications to analysis specifications} +\usage{ +addEvidenceSynthesisModuleSpecifications( + analysisSpecifications, + moduleSpecifications +) +} +\arguments{ +\item{analysisSpecifications}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{moduleSpecifications}{An object of type \code{EvidenceSynthesisModule}.} +} +\value{ +Returns the \code{analysisSpecifications} object with the module specifications added. +} +\description{ +Add Evidence Synthesis module specifications to analysis specifications +} diff --git a/man/addModuleSpecifications.Rd b/man/addModuleSpecifications.Rd index 490686cb..608e3e61 100644 --- a/man/addModuleSpecifications.Rd +++ b/man/addModuleSpecifications.Rd @@ -10,7 +10,7 @@ addModuleSpecifications(analysisSpecifications, moduleSpecifications) \item{analysisSpecifications}{An object of type \code{AnalysisSpecifications} as created by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} -\item{moduleSpecifications}{An object of type \code{ModuleSpecifications}.} +\item{moduleSpecifications}{An object of type \code{ModuleSpecifications}} } \value{ Returns the \code{analysisSpecifications} object with the module specifications added. 
diff --git a/man/addPatientLevelPredictionModuleSpecifications.Rd b/man/addPatientLevelPredictionModuleSpecifications.Rd new file mode 100644 index 00000000..1cb16c10 --- /dev/null +++ b/man/addPatientLevelPredictionModuleSpecifications.Rd @@ -0,0 +1,23 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/Settings.R +\name{addPatientLevelPredictionModuleSpecifications} +\alias{addPatientLevelPredictionModuleSpecifications} +\title{Add Patient Level Prediction module specifications to analysis specifications} +\usage{ +addPatientLevelPredictionModuleSpecifications( + analysisSpecifications, + moduleSpecifications +) +} +\arguments{ +\item{analysisSpecifications}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{moduleSpecifications}{An object of type \code{PatientLevelPredictionModule}.} +} +\value{ +Returns the \code{analysisSpecifications} object with the module specifications added. 
+} +\description{ +Add Patient Level Prediction module specifications to analysis specifications +} diff --git a/man/addSelfControlledCaseSeriesModuleSpecifications.Rd b/man/addSelfControlledCaseSeriesModuleSpecifications.Rd new file mode 100644 index 00000000..91069bf7 --- /dev/null +++ b/man/addSelfControlledCaseSeriesModuleSpecifications.Rd @@ -0,0 +1,23 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/Settings.R +\name{addSelfControlledCaseSeriesModuleSpecifications} +\alias{addSelfControlledCaseSeriesModuleSpecifications} +\title{Add Self Controlled Case Series module specifications to analysis specifications} +\usage{ +addSelfControlledCaseSeriesModuleSpecifications( + analysisSpecifications, + moduleSpecifications +) +} +\arguments{ +\item{analysisSpecifications}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{moduleSpecifications}{An object of type \code{SelfControlledCaseSeriesModule}.} +} +\value{ +Returns the \code{analysisSpecifications} object with the module specifications added. +} +\description{ +Add Self Controlled Case Series module specifications to analysis specifications +} diff --git a/man/compareLockFiles.Rd b/man/compareLockFiles.Rd deleted file mode 100644 index 9dca734f..00000000 --- a/man/compareLockFiles.Rd +++ /dev/null @@ -1,21 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/RenvHelpers.R -\name{compareLockFiles} -\alias{compareLockFiles} -\title{Compare two renv.lock files} -\usage{ -compareLockFiles(filename1, filename2) -} -\arguments{ -\item{filename1}{The first renv.lock file name} - -\item{filename2}{The second renv.lock file name} -} -\value{ -A data.frame with the comparison of the rev.lock files -} -\description{ -Used to compare renv.lock files and return the results in a data.frame.
-The return value will include a "full join" representation of the packages -across the two lock files. -} diff --git a/man/createCdmExecutionSettings.Rd b/man/createCdmExecutionSettings.Rd index 96768f97..5e90521c 100644 --- a/man/createCdmExecutionSettings.Rd +++ b/man/createCdmExecutionSettings.Rd @@ -5,7 +5,6 @@ \title{Create CDM execution settings} \usage{ createCdmExecutionSettings( - connectionDetailsReference, workDatabaseSchema, cdmDatabaseSchema, cohortTableNames = CohortGenerator::getCohortTableNames(cohortTable = "cohort"), @@ -13,17 +12,10 @@ createCdmExecutionSettings( workFolder, resultsFolder, logFileName = file.path(resultsFolder, "strategus-log.txt"), - minCellCount = 5, - integerAsNumeric = getOption("databaseConnectorIntegerAsNumeric", default = TRUE), - integer64AsNumeric = getOption("databaseConnectorInteger64AsNumeric", default = TRUE), - resultsConnectionDetailsReference = NULL, - resultsDatabaseSchema = NULL + minCellCount = 5 ) } \arguments{ -\item{connectionDetailsReference}{A string that can be used to retrieve database connection details from a secure local -store.} - \item{workDatabaseSchema}{A database schema where intermediate data can be stored. The user (as identified in the connection details) will need to have write access to this database schema.} @@ -38,21 +30,12 @@ connection details) will need to have read access to this database schema.} \item{workFolder}{A folder in the local file system where intermediate results can be written.} -\item{resultsFolder}{A folder in the local file system where the module output will be written.} +\item{resultsFolder}{The root folder holding the study results.} \item{logFileName}{Logging information from Strategus and all modules will be located in this file. Individual modules will continue to have their own module-specific logs. 
By default this will be written to the root of the \code{resultsFolder}} \item{minCellCount}{The minimum number of subjects contributing to a count before it can be included in results.} - -\item{integerAsNumeric}{Logical: should 32-bit integers be converted to numeric (double) values? If FALSE 32-bit integers will be represented using R's native \code{Integer} class. Default is TRUE} - -\item{integer64AsNumeric}{Logical: should 64-bit integers be converted to numeric (double) values? If FALSE 64-bit integers will be represented using \code{bit64::integer64}. Default is TRUE} - -\item{resultsConnectionDetailsReference}{A string that can be used to retrieve the results database connection -details from a secure local store.} - -\item{resultsDatabaseSchema}{A schema where the results tables are stored} } \value{ An object of type \code{ExecutionSettings}. diff --git a/man/createResultDataModel.Rd b/man/createResultDataModel.Rd new file mode 100644 index 00000000..d5dcc8f4 --- /dev/null +++ b/man/createResultDataModel.Rd @@ -0,0 +1,27 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/ResultDataModel.R +\name{createResultDataModel} +\alias{createResultDataModel} +\title{Create Result Data Model} +\usage{ +createResultDataModel( +  analysisSpecifications, +  resultsDataModelSettings, +  resultsConnectionDetails +) +} +\arguments{ +\item{analysisSpecifications}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{resultsDataModelSettings}{The results data model settings as created using \code{\link[=createResultsDataModelSettings]{createResultsDataModelSettings()}}} + +\item{resultsConnectionDetails}{The connection details to the results database which +is an object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}}
function.} +} +\description{ +Use this at the study design stage to create data models for modules. +This function loads modules and executes any custom code to create +the results data model in the specified schema in the results database. +} diff --git a/man/createResultDataModels.Rd b/man/createResultDataModels.Rd deleted file mode 100644 index 69975988..00000000 --- a/man/createResultDataModels.Rd +++ /dev/null @@ -1,50 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/ResultModelCreation.R -\name{createResultDataModels} -\alias{createResultDataModels} -\title{Create Result Data Models} -\usage{ -createResultDataModels( -  analysisSpecifications, -  executionSettings, -  executionScriptFolder = NULL, -  keyringName = NULL, -  restart = FALSE, -  enforceModuleDependencies = TRUE -) -} -\arguments{ -\item{analysisSpecifications}{An object of type \code{AnalysisSpecifications} as created -by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} - -\item{executionSettings}{An object of type \code{ExecutionSettings} as created -by \code{\link[=createCdmExecutionSettings]{createCdmExecutionSettings()}} or \code{\link[=createResultsExecutionSettings]{createResultsExecutionSettings()}}.} - -\item{executionScriptFolder}{Optional: the path to use for storing the execution script. -when NULL, this function will use a temporary -file location to create the script to execute.} - -\item{keyringName}{The name of the keyring to operate on. This function assumes you have -created the keyring before calling this function. It defaults to -NULL to select the default keyring. If the keyring is password -protected, the password must be stored in the environment variable -STRATEGUS_KEYRING_PASSWORD so it is retrieved using the command -Sys.getenv("STRATEGUS_KEYRING_PASSWORD")} - -\item{restart}{Restart run?
Requires \code{executionScriptFolder} to be specified, and be -the same as the \code{executionScriptFolder} used in the run to restart.} - -\item{enforceModuleDependencies}{When set to TRUE, Strategus will enforce -module dependencies that are declared by each module. For example, the -CohortDiagnostics module declares a dependency on the CohortGenerator module -and Strategus will require that an analysis specification declare that both -modules must exist in order to execute the analysis. When set to FALSE, -Strategus will not enforce these module dependencies which assumes you have -properly run all module dependencies yourself. Setting this to FALSE is not -recommended since it is potentially unsafe.} -} -\description{ -Use this at the study design stage to create data models for modules -This functions loads modules and executes any custom code to create schemas in a results database -If recreate is set to TRUE all existing data will be removed, otherwise -} diff --git a/man/createResultsDataModelSettings.Rd b/man/createResultsDataModelSettings.Rd new file mode 100644 index 00000000..28afc453 --- /dev/null +++ b/man/createResultsDataModelSettings.Rd @@ -0,0 +1,26 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/Settings.R +\name{createResultsDataModelSettings} +\alias{createResultsDataModelSettings} +\title{Create Results Data Model Settings} +\usage{ +createResultsDataModelSettings( + resultsDatabaseSchema, + resultsFolder, + logFileName = file.path(resultsFolder, "strategus-results-data-model-log.txt") +) +} +\arguments{ +\item{resultsDatabaseSchema}{The schema in the results database that holds the results data model.} + +\item{resultsFolder}{The root folder holding the study results.} + +\item{logFileName}{Log location for data model operations} +} +\value{ +An object of type \code{ResultsDataModelSettings} +} +\description{ +The results data model settings are used to create the results data +model and to upload results. 
+} diff --git a/man/createResultsExecutionSettings.Rd b/man/createResultsExecutionSettings.Rd index 2c088e1a..4a108827 100644 --- a/man/createResultsExecutionSettings.Rd +++ b/man/createResultsExecutionSettings.Rd @@ -5,34 +5,24 @@ \title{Create Results execution settings} \usage{ createResultsExecutionSettings( - resultsConnectionDetailsReference, resultsDatabaseSchema, workFolder, resultsFolder, logFileName = file.path(resultsFolder, "strategus-log.txt"), - minCellCount = 5, - integerAsNumeric = getOption("databaseConnectorIntegerAsNumeric", default = TRUE), - integer64AsNumeric = getOption("databaseConnectorInteger64AsNumeric", default = TRUE) + minCellCount = 5 ) } \arguments{ -\item{resultsConnectionDetailsReference}{A string that can be used to retrieve the results database connection -details from a secure local store.} - -\item{resultsDatabaseSchema}{A schema where the results tables are stored} +\item{resultsDatabaseSchema}{The schema in the results database that holds the results data model.} \item{workFolder}{A folder in the local file system where intermediate results can be written.} -\item{resultsFolder}{A folder in the local file system where the module output will be written.} +\item{resultsFolder}{The root folder holding the study results.} \item{logFileName}{Logging information from Strategus and all modules will be located in this file. Individual modules will continue to have their own module-specific logs. By default this will be written to the root of the \code{resultsFolder}} \item{minCellCount}{The minimum number of subjects contributing to a count before it can be included in results.} - -\item{integerAsNumeric}{Logical: should 32-bit integers be converted to numeric (double) values? If FALSE 32-bit integers will be represented using R's native \code{Integer} class. Default is TRUE} - -\item{integer64AsNumeric}{Logical: should 64-bit integers be converted to numeric (double) values? 
If FALSE 64-bit integers will be represented using \code{bit64::integer64}. Default is TRUE} } \value{ An object of type \code{ExecutionSettings}. diff --git a/man/dot-nullList.Rd b/man/dot-nullList.Rd deleted file mode 100644 index 5b697abe..00000000 --- a/man/dot-nullList.Rd +++ /dev/null @@ -1,12 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/Settings.R -\name{.nullList} -\alias{.nullList} -\title{Used when serializing connection details to retain NULL values} -\usage{ -.nullList() -} -\description{ -Used when serializing connection details to retain NULL values -} -\keyword{internal} diff --git a/man/ensureAllModulesInstantiated.Rd b/man/ensureAllModulesInstantiated.Rd deleted file mode 100644 index 0fc9ed20..00000000 --- a/man/ensureAllModulesInstantiated.Rd +++ /dev/null @@ -1,47 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/ModuleInstantiation.R -\name{ensureAllModulesInstantiated} -\alias{ensureAllModulesInstantiated} -\title{Ensure all modules are instantiated} -\usage{ -ensureAllModulesInstantiated( - analysisSpecifications, - forceVerification = FALSE, - enforceModuleDependencies = TRUE -) -} -\arguments{ -\item{analysisSpecifications}{An object of type \code{AnalysisSpecifications} as created -by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} - -\item{forceVerification}{When set to TRUE, the verification process is forced -to re-evaluate if a module is properly installed. The default is FALSE -since if a module is successfully validated, the module will contain -the hash value of the module's renv.lock file in the file system so it can -by-pass running this check every time.} - -\item{enforceModuleDependencies}{When set to TRUE, Strategus will enforce -module dependencies that are declared by each module. 
For example, the -CohortDiagnostics module declares a dependency on the CohortGenerator module -and Strategus will require that an analysis specification declare that both -modules must exist in order to execute the analysis. When set to FALSE, -Strategus will not enforce these module dependencies which assumes you have -properly run all module dependencies yourself. Setting this to FALSE is not -recommended since it is potentially unsafe.} -} -\value{ -A list containing the install status of all modules -(TRUE if all are installed properly) and a tibble listing -the instantiated modules. -} -\description{ -Ensure that all modules referenced in the analysis specifications are instantiated -locally in the folder specified in the \code{INSTANTIATED_MODULES_FOLDER} environmental -variable. - -Missing modules will be fetched from remote repositories. - -This function will also check whether there are different versions of the same -module specified, which is not allowed, and whether all modules required by the -specified modules are also instantiated. 
-} diff --git a/man/execute.Rd b/man/execute.Rd index 40f20941..671869de 100644 --- a/man/execute.Rd +++ b/man/execute.Rd @@ -4,14 +4,7 @@ \alias{execute} \title{Execute analysis specifications.} \usage{ -execute( - analysisSpecifications, - executionSettings, - executionScriptFolder = NULL, - keyringName = NULL, - restart = FALSE, - enforceModuleDependencies = TRUE -) +execute(analysisSpecifications, executionSettings, connectionDetails) } \arguments{ \item{analysisSpecifications}{An object of type \code{AnalysisSpecifications} as created @@ -20,28 +13,8 @@ by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecifici \item{executionSettings}{An object of type \code{ExecutionSettings} as created by \code{\link[=createCdmExecutionSettings]{createCdmExecutionSettings()}} or \code{\link[=createResultsExecutionSettings]{createResultsExecutionSettings()}}.} -\item{executionScriptFolder}{Optional: the path to use for storing the execution script. -when NULL, this function will use a temporary -file location to create the script to execute.} - -\item{keyringName}{The name of the keyring to operate on. This function assumes you have -created the keyring before calling this function. It defaults to -NULL to select the default keyring. If the keyring is password -protected, the password must be stored in the environment variable -STRATEGUS_KEYRING_PASSWORD so it is retrieved using the command -Sys.getenv("STRATEGUS_KEYRING_PASSWORD")} - -\item{restart}{Restart run? Requires \code{executionScriptFolder} to be specified, and be -the same as the \code{executionScriptFolder} used in the run to restart.} - -\item{enforceModuleDependencies}{When set to TRUE, Strategus will enforce -module dependencies that are declared by each module. For example, the -CohortDiagnostics module declares a dependency on the CohortGenerator module -and Strategus will require that an analysis specification declare that both -modules must exist in order to execute the analysis. 
When set to FALSE, -Strategus will not enforce these module dependencies which assumes you have -properly run all module dependencies yourself. Setting this to FALSE is not -recommended since it is potentially unsafe.} +\item{connectionDetails}{An object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} } \value{ Does not return anything. Is called for the side-effect of executing the specified diff --git a/man/getDatabaseIdentifierFilePath.Rd b/man/getDatabaseIdentifierFilePath.Rd new file mode 100644 index 00000000..2a7dca75 --- /dev/null +++ b/man/getDatabaseIdentifierFilePath.Rd @@ -0,0 +1,18 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/DatabaseMetaData.R +\name{getDatabaseIdentifierFilePath} +\alias{getDatabaseIdentifierFilePath} +\title{Provides the file path to the database identifier file created +by Strategus} +\usage{ +getDatabaseIdentifierFilePath(resultsFolder) +} +\arguments{ +\item{resultsFolder}{The root folder holding the study results.} +} +\description{ +This function is used to identify the location of the database identifier +created by Strategus when running an analysis specification. This +location is important when uploading results since the database identifier +may be needed to purge old results for a given database identifier. +} diff --git a/man/getModuleList.Rd b/man/getModuleList.Rd deleted file mode 100644 index 005f84b3..00000000 --- a/man/getModuleList.Rd +++ /dev/null @@ -1,22 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/Settings.R -\name{getModuleList} -\alias{getModuleList} -\title{Provides a list of HADES modules to run through Strategus} -\usage{ -getModuleList() -} -\value{ -A data.frame() of modules that work with Strategus. 
This will contain: -module = The name of the module -version = The version of the module -remote_repo = The remote location of the module (i.e. github.com) -remote_username = The organization of the module (i.e. OHDSI) -module_type = 'cdm' or 'results'. 'cdm' refers to modules that are designed to work against -patient level data in the OMOP CDM format. 'results' refers to modules that are designed -to work against a results database containing output from a 'cdm' module. -} -\description{ -This function provides a list of modules and their locations -that may be used with Strategus. -} diff --git a/man/installLatestModule.Rd b/man/installLatestModule.Rd deleted file mode 100644 index ffe9432e..00000000 --- a/man/installLatestModule.Rd +++ /dev/null @@ -1,21 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/ModuleInstantiation.R -\name{installLatestModule} -\alias{installLatestModule} -\title{Install the latest release of a module} -\usage{ -installLatestModule(moduleName) -} -\arguments{ -\item{moduleName}{The name of the module to install (i.e. "CohortGeneratorModule"). -This parameter must match a value found in the \code{module} column of \code{getModuleList()}} -} -\value{ -None - this function is called for its side effects -} -\description{ -This function will call out to the OHDSI GitHub repo to find the latest -version of the module and attempt to install it. Only modules that are listed -in the \code{getModuleList()} function are allowed since it will have a known -GitHub location. 
-} diff --git a/man/lockFileToDataFrame.Rd b/man/lockFileToDataFrame.Rd deleted file mode 100644 index e5569b87..00000000 --- a/man/lockFileToDataFrame.Rd +++ /dev/null @@ -1,12 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/RenvHelpers.R -\name{lockFileToDataFrame} -\alias{lockFileToDataFrame} -\title{Convert a lock file to a data.frame} -\usage{ -lockFileToDataFrame(lf) -} -\description{ -Convert a lock file to a data.frame -} -\keyword{internal} diff --git a/man/mandatoryPackages.Rd b/man/mandatoryPackages.Rd deleted file mode 100644 index 3c0c9f7e..00000000 --- a/man/mandatoryPackages.Rd +++ /dev/null @@ -1,12 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/RenvHelpers.R -\name{mandatoryPackages} -\alias{mandatoryPackages} -\title{List of mandatory packages for a Strategus module} -\usage{ -mandatoryPackages() -} -\description{ -List of mandatory packages for a Strategus module -} -\keyword{internal} diff --git a/man/retrieveConnectionDetails.Rd b/man/retrieveConnectionDetails.Rd deleted file mode 100644 index 0c8b899d..00000000 --- a/man/retrieveConnectionDetails.Rd +++ /dev/null @@ -1,28 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/Settings.R -\name{retrieveConnectionDetails} -\alias{retrieveConnectionDetails} -\title{Retrieve connection details from the secure location} -\usage{ -retrieveConnectionDetails(connectionDetailsReference, keyringName = NULL) -} -\arguments{ -\item{connectionDetailsReference}{A string that can be used to retrieve the settings from -the secure store.} - -\item{keyringName}{The name of the keyring to operate on. This function assumes you have -created the keyring before calling this function. It defaults to -NULL to select the default keyring. 
If the keyring is password -protected, the password must be stored in the environment variable -STRATEGUS_KEYRING_PASSWORD so it is retrieved using the command -Sys.getenv("STRATEGUS_KEYRING_PASSWORD")} -} -\value{ -Returns an object of type \code{connectionDetails}. -} -\description{ -Retrieve connection details from the secure location -} -\seealso{ -\code{\link[=storeConnectionDetails]{storeConnectionDetails()}} -} diff --git a/man/runSchemaCreation.Rd b/man/runSchemaCreation.Rd deleted file mode 100644 index 821bab19..00000000 --- a/man/runSchemaCreation.Rd +++ /dev/null @@ -1,35 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/ResultModelCreation.R -\name{runSchemaCreation} -\alias{runSchemaCreation} -\title{Create module(s) result data model} -\usage{ -runSchemaCreation( - analysisSpecifications, - keyringSettings, - moduleIndex, - executionSettings, - ... -) -} -\arguments{ -\item{analysisSpecifications}{An object of type \code{AnalysisSpecifications} as created -by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} - -\item{keyringSettings}{The keyringSettings from the executionSettings context} - -\item{moduleIndex}{The index of the module in the analysis specification} - -\item{executionSettings}{An object of type \code{ExecutionSettings} as created -by \code{\link[=createCdmExecutionSettings]{createCdmExecutionSettings()}} or \code{\link[=createResultsExecutionSettings]{createResultsExecutionSettings()}}.} - -\item{...}{For future expansion} -} -\description{ -This function will create the results data model for the modules in the -\code{analysisSpecifications}. A module can implement its own results data model -creation function by implementing the function \code{createDataModelSchema} in -its Main.R. 
The default behavior is to use the \code{ResultsModelManager} to create -the results data model based on the \code{resultsDataModelSpecification.csv} in the -module's results folder. -} diff --git a/man/storeConnectionDetails.Rd b/man/storeConnectionDetails.Rd deleted file mode 100644 index b75ce038..00000000 --- a/man/storeConnectionDetails.Rd +++ /dev/null @@ -1,36 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/Settings.R -\name{storeConnectionDetails} -\alias{storeConnectionDetails} -\title{Store connection details in a secure location} -\usage{ -storeConnectionDetails( - connectionDetails, - connectionDetailsReference, - keyringName = NULL -) -} -\arguments{ -\item{connectionDetails}{An object of type \code{connectionDetails} as created by the -\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} - -\item{connectionDetailsReference}{A string that can be used to retrieve the settings from -the secure store.} - -\item{keyringName}{The name of the keyring to operate on. This function assumes you have -created the keyring before calling this function. It defaults to -NULL to select the default keyring. If the keyring is password -protected, the password must be stored in the environment variable -STRATEGUS_KEYRING_PASSWORD so it is retrieved using the command -Sys.getenv("STRATEGUS_KEYRING_PASSWORD")} -} -\value{ -Does not return anything. Is called for the side effect of having the connection details -stored. 
-} -\description{ -Store connection details in a secure location -} -\seealso{ -\code{\link[=retrieveConnectionDetails]{retrieveConnectionDetails()}} -} diff --git a/man/suggestedPacakges.Rd b/man/suggestedPacakges.Rd deleted file mode 100644 index e1ed2d31..00000000 --- a/man/suggestedPacakges.Rd +++ /dev/null @@ -1,12 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/RenvHelpers.R -\name{suggestedPacakges} -\alias{suggestedPacakges} -\title{List of suggested packages for a Strategus module} -\usage{ -suggestedPacakges() -} -\description{ -List of suggested packages for a Strategus module -} -\keyword{internal} diff --git a/man/syncLockFile.Rd b/man/syncLockFile.Rd deleted file mode 100644 index 66dd8bff..00000000 --- a/man/syncLockFile.Rd +++ /dev/null @@ -1,31 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/RenvHelpers.R -\name{syncLockFile} -\alias{syncLockFile} -\title{Synchronize renv.lock files and overwrite the target file -(read the description)} -\usage{ -syncLockFile(sourceOfTruthLockFileName, targetLockFileName) -} -\arguments{ -\item{sourceOfTruthLockFileName}{The renv.lock file to use as the source of -truth} - -\item{targetLockFileName}{The target renv.lock file that will be synced with -the source of truth} -} -\value{ -A data.frame containing the different packages and their version that -were involved in the synchronization process -} -\description{ -Used to synchronize the values from the "source of truth" renv.lock file to -the target renv.lock file. Packages are compared (by name) and if the version -of the package in the "source of truth" is greater the one found in the -target, the target renv.lock file will be updated. This function will -automatically update the target file. - -Version comparison is handled by the \code{semver} package and since most packages -use semantic versioning. 
When a package does not use semantic versioning, -a warning is provided so the user can review. -} diff --git a/man/unlockKeyring.Rd b/man/unlockKeyring.Rd deleted file mode 100644 index 9f1d7dc2..00000000 --- a/man/unlockKeyring.Rd +++ /dev/null @@ -1,25 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/Settings.R -\name{unlockKeyring} -\alias{unlockKeyring} -\title{Helper function to unlock a keyring} -\usage{ -unlockKeyring(keyringName) -} -\arguments{ -\item{keyringName}{The name of the keyring to operate on. This function assumes you have -created the keyring before calling this function. It defaults to -NULL to select the default keyring. If the keyring is password -protected, the password must be stored in the environment variable -STRATEGUS_KEYRING_PASSWORD so it is retrieved using the command -Sys.getenv("STRATEGUS_KEYRING_PASSWORD")} -} -\value{ -Returns TRUE if the keyring was unlocked using the password otherwise -it returns FALSE -} -\description{ -This helper function is used to unlock a keyring by using the password -stored in Sys.getenv("STRATEGUS_KEYRING_PASSWORD"). It will alert -the user if the environment variable with the password is not set. 
-} diff --git a/man/uploadResults.Rd b/man/uploadResults.Rd new file mode 100644 index 00000000..e56f3889 --- /dev/null +++ b/man/uploadResults.Rd @@ -0,0 +1,25 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/ResultDataModel.R +\name{uploadResults} +\alias{uploadResults} +\title{Upload results} +\usage{ +uploadResults( +  analysisSpecifications, +  resultsDataModelSettings, +  resultsConnectionDetails +) +} +\arguments{ +\item{analysisSpecifications}{An object of type \code{AnalysisSpecifications} as created +by \code{\link[=createEmptyAnalysisSpecificiations]{createEmptyAnalysisSpecificiations()}}.} + +\item{resultsDataModelSettings}{The results data model settings as created using \code{\link[=createResultsDataModelSettings]{createResultsDataModelSettings()}}} + +\item{resultsConnectionDetails}{The connection details to the results database which +is an object of class \code{connectionDetails} as created by the +\code{\link[DatabaseConnector:createConnectionDetails]{DatabaseConnector::createConnectionDetails()}} function.} +} +\description{ +Upload the results for a given analysis +} diff --git a/man/validateLockFile.Rd b/man/validateLockFile.Rd deleted file mode 100644 index 42d79be0..00000000 --- a/man/validateLockFile.Rd +++ /dev/null @@ -1,17 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/RenvHelpers.R -\name{validateLockFile} -\alias{validateLockFile} -\title{Validate an renv.lock file to ensure it is ready for use by Strategus} -\usage{ -validateLockFile(filename) -} -\arguments{ -\item(unknown){The renv.lock file to validate} -} -\description{ -Will check an renv.lock file for a module to verify that it only references -tagged packages and includes the packages required by Strategus. It will -also check for suggested packages that are useful for testing, such as -RSQLite.
-} diff --git a/man/verifyModuleInstallation.Rd b/man/verifyModuleInstallation.Rd deleted file mode 100644 index d0b96c1b..00000000 --- a/man/verifyModuleInstallation.Rd +++ /dev/null @@ -1,45 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/ModuleInstantiation.R -\name{verifyModuleInstallation} -\alias{verifyModuleInstallation} -\title{Verify a module is properly installed} -\usage{ -verifyModuleInstallation( - module, - version, - silent = FALSE, - forceVerification = FALSE -) -} -\arguments{ -\item{module}{The name of the module to verify (i.e. "CohortGeneratorModule")} - -\item{version}{The version of the module to verify (i.e. "0.2.1")} - -\item{silent}{When TRUE output of this verification process is suppressed} - -\item{forceVerification}{When set to TRUE, the verification process is forced -to re-evaluate if a module is properly installed. The default is FALSE -since if a module is successfully validated, the module will contain -the hash value of the module's renv.lock file in the file system so it can -by-pass running this check every time.} -} -\value{ -A list with the output of the consistency check -} -\description{ -In some instances a module may fail to instantiate and install due to problems -when calling renv::restore for the module's renv.lock file. This function -will allow you to surface inconsistencies between the module renv.lock file -and the module's renv project library. This function will check to that a -module has been properly installed using internal functions of the \code{renv} -package. If a module is verified to work via this function, the hash of -the module's renv.lock file will be written to a text file in the module -directory to indicate that it is ready for use. This will allow subsequent -calls to work faster since the initial verification process can take some -time.It is possible to re-run the verification of a module -by using the \code{forceVerification} parameter. 
- -To fix issues with a module, you will need to open the module's .Rproj in -RStudio instance and debug the issues when calling renv::restore(). -} diff --git a/man/withModuleRenv.Rd b/man/withModuleRenv.Rd deleted file mode 100644 index dd61f4e0..00000000 --- a/man/withModuleRenv.Rd +++ /dev/null @@ -1,41 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/ModuleEnv.R -\name{withModuleRenv} -\alias{withModuleRenv} -\title{Load module execution space inside and renv -inspired by targets::tar_script but allowing custom variable execution} -\usage{ -withModuleRenv( - code, - moduleFolder, - injectVars = list(), - tempScriptFile = tempfile(fileext = ".R"), - job = FALSE, - processName = paste(moduleFolder, "_renv_run") -) -} -\arguments{ -\item{code}{code block to execute} - -\item{moduleFolder}{Instantiated Strategus module folder} - -\item{injectVars}{list of var names list(name=value) to replace (e.g. replace list(foo = "some string") will -find the pattern foo and replace it with the string some string - be careful!} - -\item{tempScriptFile}{tempFile to write script to} - -\item{job}{run as rstudio job} - -\item{processName}{String name for process} -} -\value{ -NULL invisibly -} -\description{ -Designed to allow more human readable code that is executed inside a module as well as simple variable substituion -for injecting constants (e.g. simple parameters or file paths used inside and outside of modules) -} -\details{ -This pattern also allows dependency injection which could be used if you don't want to use and renv and (instead) -would like to use docker images or just execution in the base environment for testing/debugging -} diff --git a/man/zipResults.Rd b/man/zipResults.Rd index 4e673245..bacb866f 100644 --- a/man/zipResults.Rd +++ b/man/zipResults.Rd @@ -7,8 +7,7 @@ zipResults(resultsFolder, zipFile) } \arguments{ -\item{resultsFolder}{The folder holding the study results. 
This is found in -\code{executionSettings$resultsFolder}.} +\item{resultsFolder}{The root folder holding the study results.} \item{zipFile}{The path to the zip file to be created.} } diff --git a/tests/testthat/setup.R b/tests/testthat/setup.R index 9d0a2c45..2d69d0d6 100644 --- a/tests/testthat/setup.R +++ b/tests/testthat/setup.R @@ -1,18 +1,9 @@ library(testthat) library(Strategus) library(Eunomia) +library(CohortGenerator) library(dplyr) -# allows unit tests to run on mac without issue -baseBackend <- Sys.getenv("R_KEYRING_BACKEND") -Sys.setenv("R_KEYRING_BACKEND" = "file") -withr::defer( - { - Sys.setenv("R_KEYRING_BACKEND" = baseBackend) - }, - testthat::teardown_env() -) - if (dir.exists(Sys.getenv("DATABASECONNECTOR_JAR_FOLDER"))) { jdbcDriverFolder <- Sys.getenv("DATABASECONNECTOR_JAR_FOLDER") } else { @@ -28,25 +19,12 @@ if (dir.exists(Sys.getenv("DATABASECONNECTOR_JAR_FOLDER"))) { testthat::teardown_env() ) } - # Create a unique ID for the table identifiers tableSuffix <- paste0(substr(.Platform$OS.type, 1, 3), format(Sys.time(), "%y%m%d%H%M%S"), sample(1:100, 1)) tableSuffix <- abs(digest::digest2int(tableSuffix)) -usingTempDir <- Sys.getenv("STRATEGUS_UNIT_TEST_FOLDER") == "" -tempDir <- ifelse(usingTempDir, tempfile(), Sys.getenv("STRATEGUS_UNIT_TEST_FOLDER")) +tempDir <- tempfile() tempDir <- gsub("\\\\", "/", tempDir) # Correct windows path -renvCachePath <- file.path(tempDir, "strategus/renv") -moduleFolder <- file.path(tempDir, "strategus/modules") -Sys.setenv("INSTANTIATED_MODULES_FOLDER" = moduleFolder) -withr::defer( - { - if (usingTempDir) { - unlink(c(tempDir, renvCachePath, moduleFolder), recursive = TRUE, force = TRUE) - } - }, - testthat::teardown_env() -) # Assemble a list of connectionDetails for the tests ----------- connectionDetailsList <- list() @@ -176,38 +154,3 @@ if (!(Sys.getenv("CDM5_SQL_SERVER_USER") == "" & ) } -# Keyring helpers -------------- -# Set the keyring name & password for testing -keyringName <- "strategus" 
-keyringPassword <- "ohdsi" - -deleteKeyringForUnitTest <- function(selectedKeyring = keyringName, selectedKeyringPassword = keyringPassword) { - # Create a keyring called "strategus" that is password protected - allKeyrings <- keyring::keyring_list() - if (selectedKeyring %in% allKeyrings$keyring) { - if (keyring::keyring_is_locked(keyring = selectedKeyring)) { - keyring::keyring_unlock(keyring = selectedKeyring, password = selectedKeyringPassword) - } - # Delete all keys from the keyring so we can delete it - keys <- keyring::key_list(keyring = selectedKeyring) - if (nrow(keys) > 0) { - for (i in 1:nrow(keys)) { - keyring::key_delete(keys$service[1], keyring = selectedKeyring) - } - } - keyring::keyring_delete(keyring = selectedKeyring) - } -} - -createKeyringForUnitTest <- function(selectedKeyring = keyringName, selectedKeyringPassword = keyringPassword) { - # Delete any existing keyrings - deleteKeyringForUnitTest(selectedKeyring = selectedKeyring) - # Create & Lock the keyring - keyring::keyring_create(keyring = selectedKeyring, password = selectedKeyringPassword) - keyring::keyring_lock(keyring = selectedKeyring) -} - -skip_if_not_secret_service <- function() { - if (keyring::default_backend()$name != "secret service") skip("Not secret service") - invisible(TRUE) -} diff --git a/tests/testthat/test-DatabaseMetaData.R b/tests/testthat/test-DatabaseMetaData.R index c6eda6ca..f7377ecf 100644 --- a/tests/testthat/test-DatabaseMetaData.R +++ b/tests/testthat/test-DatabaseMetaData.R @@ -1,5 +1,4 @@ test_that("Test DatabaseMetaData error conditions", { - skip_if_not_secret_service() # Run this test in isolation as it will make changes to the CDM schema. 
eunomiaConnectionDetails <- Eunomia::getEunomiaConnectionDetails() connection <- DatabaseConnector::connect(eunomiaConnectionDetails) @@ -32,24 +31,12 @@ test_that("Test DatabaseMetaData error conditions", { ) } DatabaseConnector::disconnect(connection) - unlink(eunomiaConnectionDetails$server, recursive = TRUE, force = TRUE) }, testthat::teardown_env() ) - # Setup keyring for the test - Sys.setenv("STRATEGUS_KEYRING_PASSWORD" = keyringPassword) - createKeyringForUnitTest(selectedKeyring = keyringName, selectedKeyringPassword = keyringPassword) - on.exit(deleteKeyringForUnitTest()) - # Confirm an error is thrown when 1 or more of these tables are missing - Strategus::storeConnectionDetails( - connectionDetails = eunomiaConnectionDetails, - connectionDetailsReference = dbms, - keyringName = keyringName - ) executionSettings <- Strategus::createCdmExecutionSettings( - connectionDetailsReference = dbms, workDatabaseSchema = workDatabaseSchema, cdmDatabaseSchema = cdmDatabaseSchema, cohortTableNames = CohortGenerator::getCohortTableNames(), @@ -58,9 +45,9 @@ test_that("Test DatabaseMetaData error conditions", { minCellCount = 5 ) expect_error( - Strategus:::createDatabaseMetaData( + Strategus:::.createDatabaseMetaData( executionSettings = executionSettings, - keyringName = keyringName + connectionDetails = eunomiaConnectionDetails ), regexp = "FATAL ERROR: Your OMOP CDM is missing the following required tables: cdm_source, vocabulary, observation_period" ) @@ -79,9 +66,9 @@ test_that("Test DatabaseMetaData error conditions", { } expect_error( - Strategus:::createDatabaseMetaData( + Strategus:::.createDatabaseMetaData( executionSettings = executionSettings, - keyringName = keyringName + connectionDetails = eunomiaConnectionDetails ), regexp = "FATAL ERROR: The CDM_SOURCE table in your OMOP CDM is empty." 
) @@ -98,9 +85,9 @@ test_that("Test DatabaseMetaData error conditions", { ) expect_error( - Strategus:::createDatabaseMetaData( + Strategus:::.createDatabaseMetaData( executionSettings = executionSettings, - keyringName = keyringName + connectionDetails = eunomiaConnectionDetails ), regexp = "FATAL ERROR: The VOCABULARY table in your OMOP CDM is missing the version" ) @@ -115,9 +102,9 @@ test_that("Test DatabaseMetaData error conditions", { backup_table = "vocabulary_bak" ) expect_error( - Strategus:::createDatabaseMetaData( + Strategus:::.createDatabaseMetaData( executionSettings = executionSettings, - keyringName = keyringName + connectionDetails = eunomiaConnectionDetails ), regexp = "FATAL ERROR: The OBSERVATION_PERIOD table in your OMOP CDM lacks a maximum observation_period_end_date" ) diff --git a/tests/testthat/test-Execution.R b/tests/testthat/test-Execution.R new file mode 100644 index 00000000..2404b711 --- /dev/null +++ b/tests/testthat/test-Execution.R @@ -0,0 +1,181 @@ +test_that("Execute study, upload results, excute results modules and upload results", { + analysisSpecifications <- ParallelLogger::loadSettingsFromJson( + fileName = system.file("testdata/cdmModulesAnalysisSpecifications.json", + package = "Strategus" + ) + ) + studyRootFolder <- file.path(tempDir, "EunomiaTestStudy") + workFolder <- file.path(studyRootFolder, "work_folder") + resultsFolder <- file.path(studyRootFolder, "results_folder") + if (!dir.exists(studyRootFolder)) { + dir.create(studyRootFolder, recursive = TRUE) + } + + withr::defer( + { + unlink(studyRootFolder, recursive = TRUE, force = TRUE) + }, + testthat::teardown_env() + ) + + # Execute the study --------------------------- + executionSettings <- createCdmExecutionSettings( + workDatabaseSchema = workDatabaseSchema, + cdmDatabaseSchema = cdmDatabaseSchema, + cohortTableNames = CohortGenerator::getCohortTableNames(cohortTable = "unit_test"), + workFolder = workFolder, + resultsFolder = resultsFolder + ) + + 
ParallelLogger::saveSettingsToJson( + object = executionSettings, + file.path(studyRootFolder, "eunomiaExecutionSettings.json") + ) + + executionSettings <- ParallelLogger::loadSettingsFromJson( + fileName = file.path(studyRootFolder, "eunomiaExecutionSettings.json") + ) + + expect_warning( + Strategus::execute( + connectionDetails = connectionDetails, + analysisSpecifications = analysisSpecifications, + executionSettings = executionSettings + ) + ) + + # Create a results DB and upload results + dbFilePath <- file.path(studyRootFolder, "testdm.sqlite") + mydb <- dbConnect(RSQLite::SQLite(), dbFilePath) + dbDisconnect(mydb) + + withr::defer( + { + unlink(dbFilePath, recursive = TRUE, force = TRUE) + }, + testthat::teardown_env() + ) + + resultsConnectionDetails <- DatabaseConnector::createConnectionDetails( + dbms = "sqlite", + server = dbFilePath + ) + + resultsDataModelSettings <- Strategus::createResultsDataModelSettings( + resultsDatabaseSchema = "main", + resultsFolder = executionSettings$resultsFolder + ) + + # Create cdm modules results data model ------------------------- + cdmModulesAnalysisSpecifications <- ParallelLogger::loadSettingsFromJson( + fileName = system.file("testdata/cdmModulesAnalysisSpecifications.json", + package = "Strategus" + ) + ) + + Strategus::createResultDataModel( + analysisSpecifications = cdmModulesAnalysisSpecifications, + resultsDataModelSettings = resultsDataModelSettings, + resultsConnectionDetails = resultsConnectionDetails + ) + + # Upload cdm related results -------------------- + Strategus::uploadResults( + analysisSpecifications = analysisSpecifications, + resultsDataModelSettings = resultsDataModelSettings, + resultsConnectionDetails = resultsConnectionDetails + ) + + # Execute results modules ------------------------- + resultsModulesAnalysisSpecifications <- ParallelLogger::loadSettingsFromJson( + fileName = system.file("testdata/resultsModulesAnalysisSpecifications.json", + package = "Strategus" + ) + ) + + 
resultsExecutionSettings <- Strategus::createResultsExecutionSettings( + resultsDatabaseSchema = resultsDataModelSettings$resultsDatabaseSchema, + workFolder = file.path(studyRootFolder, "results_modules", "work_folder"), + resultsFolder = file.path(studyRootFolder, "results_modules", "results_folder") + ) + + Strategus::execute( + connectionDetails = resultsConnectionDetails, + analysisSpecifications = resultsModulesAnalysisSpecifications, + executionSettings = resultsExecutionSettings + ) + + # Create the results data model ------ + resultsDataModelSettings <- Strategus::createResultsDataModelSettings( + resultsDatabaseSchema = "main", + resultsFolder = resultsExecutionSettings$resultsFolder + ) + + # NOTE: This will throw a warning since the database metadata + # does not exist + expect_warning( + Strategus::createResultDataModel( + analysisSpecifications = resultsModulesAnalysisSpecifications, + resultsDataModelSettings = resultsDataModelSettings, + resultsConnectionDetails = resultsConnectionDetails + ) + ) + + # Upload the results ------------- + resultsDataModelSettings <- Strategus::createResultsDataModelSettings( + resultsDatabaseSchema = resultsDataModelSettings$resultsDatabaseSchema, + resultsFolder = resultsExecutionSettings$resultsFolder + ) + + # NOTE: This will throw a warning since the database metadata + # does not exist + expect_warning( + Strategus::uploadResults( + analysisSpecifications = resultsModulesAnalysisSpecifications, + resultsDataModelSettings = resultsDataModelSettings, + resultsConnectionDetails = resultsConnectionDetails + ) + ) + + # Get a list of tables + connection <- DatabaseConnector::connect(resultsConnectionDetails) + on.exit(DatabaseConnector::disconnect(connection)) + tableList <- DatabaseConnector::getTableNames( + connection = connection, + databaseSchema = resultsDataModelSettings$resultsDatabaseSchema + ) + + expect_true(length(tableList) > 0) +}) + +test_that("Execute on Oracle stops if table names exceed length 
limit", { + sqlRenderTempEmulationSchema <- getOption("sqlRenderTempEmulationSchema", default = "") + options(sqlRenderTempEmulationSchema = "some_schema") + on.exit(options(sqlRenderTempEmulationSchema = sqlRenderTempEmulationSchema)) + + connectionDetails <- DatabaseConnector::createConnectionDetails( + dbms = "oracle" + ) + executionSettings <- Strategus::createCdmExecutionSettings( + workDatabaseSchema = "does_not_matter", + cdmDatabaseSchema = "does_not_matter", + cohortTableNames = CohortGenerator::getCohortTableNames("some_really_long_table_name_for_testing_that_oracle_throws_a_warning"), + workFolder = file.path(tempDir, "work_folder"), + resultsFolder = file.path(tempDir, "results_folder"), + minCellCount = 5 + ) + + analysisSpecifications <- ParallelLogger::loadSettingsFromJson( + fileName = system.file("testdata/cdmModulesAnalysisSpecifications.json", + package = "Strategus" + ) + ) + + expect_error( + Strategus::execute( + connectionDetails = connectionDetails, + analysisSpecifications = analysisSpecifications, + executionSettings = executionSettings + ) + ) +}) diff --git a/tests/testthat/test-ModuleInstantiation.R b/tests/testthat/test-ModuleInstantiation.R deleted file mode 100644 index bee17942..00000000 --- a/tests/testthat/test-ModuleInstantiation.R +++ /dev/null @@ -1,58 +0,0 @@ -test_that("Prevent multiple modules with different versions in analysis specification", { - analysisSpecifications <- ParallelLogger::loadSettingsFromJson( - fileName = system.file("testdata/unitTestAnalysisSpecification.json", - package = "Strategus" - ) - ) - - # Duplicate the module - analysisSpecifications$moduleSpecifications[[2]] <- analysisSpecifications$moduleSpecifications[[1]] - analysisSpecifications$moduleSpecifications[[2]]$version <- "x" - - expect_error( - ensureAllModulesInstantiated( - analysisSpecifications = analysisSpecifications - ) - ) -}) - -# TODO: We'd like to test this functionality but both methods require that -# the module is instantiated 
which is very time consuming. Instead these -# tests should mock the MetaData.json that exists in the instantiated -# modules so that these methods work faster. -# test_that("Enforce module dependencies", { -# analysisSpecifications <- ParallelLogger::loadSettingsFromJson( -# fileName = system.file("testdata/analysisSpecification.json", -# package = "Strategus" -# ) -# ) -# -# # Remove the cohort generator module which is a dependency for all -# # the other modules -# analysisSpecifications$moduleSpecifications <- analysisSpecifications$moduleSpecifications[-1] -# modules <- getModuleTable(analysisSpecifications, distinct = TRUE) -# -# expect_error( -# checkModuleDependencies( -# modules = modules, -# enforceModuleDependencies = TRUE -# ) -# ) -# }) -# -# test_that("Do not enforce module dependencies", { -# analysisSpecifications <- ParallelLogger::loadSettingsFromJson( -# fileName = system.file("testdata/unitTestAnalysisSpecification.json", -# package = "Strategus" -# ) -# ) -# -# modules <- getModuleTable(analysisSpecifications, distinct = TRUE) -# -# expect_silent( -# checkModuleDependencies( -# modules = modules, -# enforceModuleDependencies = FALSE -# ) -# ) -# }) diff --git a/tests/testthat/test-RenvHelpers.R b/tests/testthat/test-RenvHelpers.R deleted file mode 100644 index 01b89043..00000000 --- a/tests/testthat/test-RenvHelpers.R +++ /dev/null @@ -1,103 +0,0 @@ -test_that("Check renv.lock compare", { - compare <- Strategus::compareLockFiles( - filename1 = system.file("testdata/renv.lock", package = "Strategus"), - filename2 = system.file("testdata/renv.lock", package = "Strategus") - ) - expect_true(nrow(compare) > 0) - expect_true(nrow(compare[compare$lockfile1Version != compare$lockfile1Version, ]) == 0) -}) - -test_that("Check renv.lock sync detects no changes", { - expect_null( - Strategus::syncLockFile( - sourceOfTruthLockFileName = system.file("testdata/renv.lock", package = "Strategus"), - targetLockFileName = system.file("testdata/renv.lock", 
package = "Strategus") - ) - ) -}) - -test_that("Check renv.lock sync works", { - tempDir <- tempdir() - tempLockFile <- file.path(tempDir, "renv.lock") - on.exit(unlink(tempLockFile)) - file.copy( - from = system.file("testdata/renv.lock", package = "Strategus"), - to = file.path(tempDir, "renv.lock") - ) - # Get the expected value - lf <- renv::lockfile_read( - file = system.file("testdata/renv.lock", package = "Strategus") - ) - expectedVersion <- lf$Packages$zip$Version - oldVersion <- "0.0.1" - renv::record( - records = paste0("zip@", oldVersion), - lockfile = tempLockFile - ) - - # Confirm the update ahead of the test worked - tempLf <- renv::lockfile_read( - file = tempLockFile - ) - expect_equal(tempLf$Packages$zip$Version, oldVersion) - - # Perform the sync - Strategus::syncLockFile( - sourceOfTruthLockFileName = system.file("testdata/renv.lock", package = "Strategus"), - targetLockFileName = tempLockFile - ) - - # Confirm that the newer version from the source of truth was applied - tempLf <- renv::lockfile_read( - file = tempLockFile - ) - expect_equal(tempLf$Packages$zip$Version, expectedVersion) -}) - -test_that("Test renv.lock validation", { - tmp <- tempfile() - on.exit(unlink(tmp)) - file.copy( - from = system.file("testdata/renv.lock", package = "Strategus"), - to = tmp - ) - - # All tests must pass on our internal lock file - expect_message( - object = Strategus::validateLockFile( - filename = tmp - ), - regexp = "PASSED" - ) - - # Remove the mandatory dependency - tmpLockFile <- renv::lockfile_read(file = tmp) - tmpLockFile$Packages$renv <- NULL - invisible(renv::lockfile_write(lockfile = tmpLockFile, file = tmp)) - expect_message( - object = Strategus::validateLockFile( - filename = tmp - ), - regexp = "FAILED" - ) - - # Remove suggested packages - tmpLockFile$Packages$RSQLite <- NULL - invisible(renv::lockfile_write(lockfile = tmpLockFile, file = tmp)) - expect_message( - object = Strategus::validateLockFile( - filename = tmp - ), - regexp = 
"FAILED" - ) - - # Mess up the CohortGenerator package to use a "HEAD" ref - tmpLockFile$Packages$CohortGenerator$RemoteRef <- "HEAD" - invisible(renv::lockfile_write(lockfile = tmpLockFile, file = tmp)) - invisible(expect_message( - object = Strategus::validateLockFile( - filename = tmp - ), - regexp = "FAILED" - )) -}) diff --git a/tests/testthat/test-Settings.R b/tests/testthat/test-Settings.R index d5f8362d..40546614 100644 --- a/tests/testthat/test-Settings.R +++ b/tests/testthat/test-Settings.R @@ -1,104 +1,411 @@ test_that("Test analysis specification creation", { - cohortSharedResource <- list(id = 1) - class(cohortSharedResource) <- c("CohortDefinitionSharedResources", "SharedResources") + # Setup some test data ------------ + cohortDefinitionSet <- getCohortDefinitionSet( + settingsFileName = system.file("testdata/Cohorts.csv", package = "Strategus"), + jsonFolder = system.file("testdata/cohorts", package = "Strategus"), + sqlFolder = system.file("testdata/sql", package = "Strategus") + ) + subsetOperations <- list( + createDemographicSubset( + name = "Demographic Criteria", + ageMin = 18, + ageMax = 64 + ) + ) + subsetDef <- createCohortSubsetDefinition( + name = "test definition", + definitionId = 1, + subsetOperators = subsetOperations + ) + cohortDefinitionSet <- cohortDefinitionSet |> + addCohortSubsetDefinition(subsetDef) - negativeControlOutcomeCohortSharedResource <- list(id = 1) - class(negativeControlOutcomeCohortSharedResource) <- c("NegativeControlOutcomeSharedResources", "SharedResources") + ncoCohortSet <- readCsv(file = system.file("testdata/negative_controls_concept_set.csv", + package = "Strategus" + )) - moduleSpecifications <- list( - module = "CohortGeneratorModule", - version = "0.0.16", - remoteRepo = "github.com", - remoteUsername = "ohdsi", - settings = list() + # Exposures-outcomes + negativeControlOutcomeIds <- ncoCohortSet$cohortId + outcomeOfInterestIds <- c(3) + exposureOfInterestIds <- c(1, 2) + + # Test Module Settings 
---------------------- + # Characterization ------------------------------- + cModuleSettingsCreator <- CharacterizationModule$new() + cModuleSpecifications <- cModuleSettingsCreator$createModuleSpecifications( + targetIds = c(1, 2), + outcomeIds = 3 ) - class(moduleSpecifications) <- c("CohortGeneratorModuleSpecifications", "ModuleSpecifications") - analysisSpecification <- createEmptyAnalysisSpecificiations() %>% - addSharedResources(cohortSharedResource) %>% - addSharedResources(negativeControlOutcomeCohortSharedResource) %>% - addModuleSpecifications(moduleSpecifications) + # Cohort Diagnostics ----------------- + cdModuleSettingsCreator <- CohortDiagnosticsModule$new() + cdModuleSpecifications <- cdModuleSettingsCreator$createModuleSpecifications( + runInclusionStatistics = TRUE, + runIncludedSourceConcepts = TRUE, + runOrphanConcepts = TRUE, + runTimeSeries = FALSE, + runVisitContext = TRUE, + runBreakdownIndexEvents = TRUE, + runIncidenceRate = TRUE, + runCohortRelationship = TRUE, + runTemporalCohortCharacterization = TRUE, + incremental = FALSE + ) - expect_equal(length(analysisSpecification$sharedResources), 2) - expect_equal(length(analysisSpecification$moduleSpecifications), 1) -}) + # Cohort Generator ----------------- + cgModuleSettingsCreator <- CohortGeneratorModule$new() -test_that("Create results execution settings", { - executionSettings <- createResultsExecutionSettings( - resultsConnectionDetailsReference = "test", - resultsDatabaseSchema = "test", - workFolder = tempfile(), - resultsFolder = tempfile(), - minCellCount = 5 + # Create the settings & validate them + cohortSharedResourcesSpecifications <- cgModuleSettingsCreator$createCohortSharedResourceSpecifications(cohortDefinitionSet) + cgModuleSettingsCreator$validateCohortSharedResourceSpecifications(cohortSharedResourcesSpecifications) + + ncoCohortSharedResourceSpecifications <- cgModuleSettingsCreator$createNegativeControlOutcomeCohortSharedResourceSpecifications(ncoCohortSet, "first", 
TRUE) + cgModuleSettingsCreator$validateNegativeControlOutcomeCohortSharedResourceSpecifications(ncoCohortSharedResourceSpecifications) + + cgModuleSpecifications <- cgModuleSettingsCreator$createModuleSpecifications() + + # Characterization ------------------------------- + cModuleSettingsCreator <- CharacterizationModule$new() + cModuleSpecifications <- cModuleSettingsCreator$createModuleSpecifications( + targetIds = c(1, 2), + outcomeIds = 3 ) - expect_equal(class(executionSettings), c("ResultsExecutionSettings", "ExecutionSettings")) -}) + # Cohort Incidence ----------------- + ciModuleSettingsCreator <- CohortIncidenceModule$new() + targets <- list( + CohortIncidence::createCohortRef(id = 1, name = "Celecoxib"), + CohortIncidence::createCohortRef(id = 2, name = "Diclofenac"), + CohortIncidence::createCohortRef(id = 4, name = "Celecoxib Age >= 30"), + CohortIncidence::createCohortRef(id = 5, name = "Diclofenac Age >= 30") + ) + outcomes <- list(CohortIncidence::createOutcomeDef(id = 1, name = "GI bleed", cohortId = 3, cleanWindow = 9999)) -test_that("Get module list", { - moduleList <- getModuleList() - expect_true(nrow(moduleList) > 0) -}) + tars <- list( + CohortIncidence::createTimeAtRiskDef(id = 1, startWith = "start", endWith = "end"), + CohortIncidence::createTimeAtRiskDef(id = 2, startWith = "start", endWith = "start", endOffset = 365) + ) + analysis1 <- CohortIncidence::createIncidenceAnalysis( + targets = c(1, 2, 4, 5), + outcomes = c(1), + tars = c(1, 2) + ) + + irDesign <- CohortIncidence::createIncidenceDesign( + targetDefs = targets, + outcomeDefs = outcomes, + tars = tars, + analysisList = list(analysis1), + strataSettings = CohortIncidence::createStrataSettings( + byYear = TRUE, + byGender = TRUE + ) + ) + + ciModuleSpecifications <- ciModuleSettingsCreator$createModuleSpecifications( + irDesign = irDesign$toList() + ) + + # Cohort Method ---------------------- + cmModuleSettingsCreator <- CohortMethodModule$new() + negativeControlOutcomes <- 
lapply( + X = ncoCohortSet$cohortId, + FUN = CohortMethod::createOutcome, + outcomeOfInterest = FALSE, + trueEffectSize = 1, + priorOutcomeLookback = 30 + ) + + outcomesOfInterest <- lapply( + X = 3, + FUN = CohortMethod::createOutcome, + outcomeOfInterest = TRUE + ) + + outcomes <- append( + negativeControlOutcomes, + outcomesOfInterest + ) + + tcos1 <- CohortMethod::createTargetComparatorOutcomes( + targetId = 1, + comparatorId = 2, + outcomes = outcomes, + excludedCovariateConceptIds = c(1118084, 1124300) + ) + tcos2 <- CohortMethod::createTargetComparatorOutcomes( + targetId = 4, + comparatorId = 5, + outcomes = outcomes, + excludedCovariateConceptIds = c(1118084, 1124300) + ) + + targetComparatorOutcomesList <- list(tcos1, tcos2) + + covarSettings <- FeatureExtraction::createDefaultCovariateSettings(addDescendantsToExclude = TRUE) + + getDbCmDataArgs <- CohortMethod::createGetDbCohortMethodDataArgs( + washoutPeriod = 183, + firstExposureOnly = TRUE, + removeDuplicateSubjects = "remove all", + maxCohortSize = 100000, + covariateSettings = covarSettings + ) + + createStudyPopArgs <- CohortMethod::createCreateStudyPopulationArgs( + minDaysAtRisk = 1, + riskWindowStart = 0, + startAnchor = "cohort start", + riskWindowEnd = 30, + endAnchor = "cohort end" + ) + + matchOnPsArgs <- CohortMethod::createMatchOnPsArgs() + fitOutcomeModelArgs <- CohortMethod::createFitOutcomeModelArgs(modelType = "cox") + createPsArgs <- CohortMethod::createCreatePsArgs( + stopOnError = FALSE, + control = Cyclops::createControl(cvRepetitions = 1) + ) + computeSharedCovBalArgs <- CohortMethod::createComputeCovariateBalanceArgs() + computeCovBalArgs <- CohortMethod::createComputeCovariateBalanceArgs( + covariateFilter = FeatureExtraction::getDefaultTable1Specifications() + ) + + cmAnalysis1 <- CohortMethod::createCmAnalysis( + analysisId = 1, + description = "No matching, simple outcome model", + getDbCohortMethodDataArgs = getDbCmDataArgs, + createStudyPopArgs = createStudyPopArgs, + 
fitOutcomeModelArgs = fitOutcomeModelArgs + ) + + cmAnalysis2 <- CohortMethod::createCmAnalysis( + analysisId = 2, + description = "Matching on ps and covariates, simple outcomeModel", + getDbCohortMethodDataArgs = getDbCmDataArgs, + createStudyPopArgs = createStudyPopArgs, + createPsArgs = createPsArgs, + matchOnPsArgs = matchOnPsArgs, + computeSharedCovariateBalanceArgs = computeSharedCovBalArgs, + computeCovariateBalanceArgs = computeCovBalArgs, + fitOutcomeModelArgs = fitOutcomeModelArgs + ) + + cmAnalysisList <- list(cmAnalysis1, cmAnalysis2) + + analysesToExclude <- NULL -test_that("Verify that unlocking keyring without password fails", { - allKeyrings <- keyring::keyring_list() - if (!keyringName %in% allKeyrings$keyring) { - keyring::keyring_create(keyring = keyringName, password = keyringPassword) + cmModuleSpecifications <- cmModuleSettingsCreator$createModuleSpecifications( + cmAnalysisList = cmAnalysisList, + targetComparatorOutcomesList = targetComparatorOutcomesList, + analysesToExclude = analysesToExclude + ) + + # EvidenceSythesis ------------------ + esModuleSettingsCreator = EvidenceSynthesisModule$new() + evidenceSynthesisSourceCm <- esModuleSettingsCreator$createEvidenceSynthesisSource( + sourceMethod = "CohortMethod", + likelihoodApproximation = "adaptive grid" + ) + metaAnalysisCm <- esModuleSettingsCreator$createBayesianMetaAnalysis( + evidenceSynthesisAnalysisId = 1, + alpha = 0.05, + evidenceSynthesisDescription = "Bayesian random-effects alpha 0.05 - adaptive grid", + evidenceSynthesisSource = evidenceSynthesisSourceCm + ) + evidenceSynthesisSourceSccs <- esModuleSettingsCreator$createEvidenceSynthesisSource( + sourceMethod = "SelfControlledCaseSeries", + likelihoodApproximation = "adaptive grid" + ) + metaAnalysisSccs <- esModuleSettingsCreator$createBayesianMetaAnalysis( + evidenceSynthesisAnalysisId = 2, + alpha = 0.05, + evidenceSynthesisDescription = "Bayesian random-effects alpha 0.05 - adaptive grid", + evidenceSynthesisSource = 
evidenceSynthesisSourceSccs + ) + evidenceSynthesisAnalysisList <- list(metaAnalysisCm, metaAnalysisSccs) + evidenceSynthesisAnalysisSpecifications <- esModuleSettingsCreator$createModuleSpecifications( + evidenceSynthesisAnalysisList + ) + # PatientLevelPrediction ------------------------------- + plpModuleSettingsCreator <- PatientLevelPredictionModule$new() + makeModelDesignSettings <- function(targetId, outcomeId, popSettings, covarSettings) { + invisible(PatientLevelPrediction::createModelDesign( + targetId = targetId, + outcomeId = outcomeId, + restrictPlpDataSettings = PatientLevelPrediction::createRestrictPlpDataSettings(), + populationSettings = popSettings, + covariateSettings = covarSettings, + preprocessSettings = PatientLevelPrediction::createPreprocessSettings(), + modelSettings = PatientLevelPrediction::setLassoLogisticRegression(), + splitSettings = PatientLevelPrediction::createDefaultSplitSetting(), + runCovariateSummary = T + )) } - # Lock the keyring - keyring::keyring_lock(keyring = keyringName) - # Remove STRATEGUS_KEYRING_PASSWORD in case it is already set - Sys.unsetenv("STRATEGUS_KEYRING_PASSWORD") + plpPopulationSettings <- PatientLevelPrediction::createStudyPopulationSettings( + startAnchor = "cohort start", + riskWindowStart = 1, + endAnchor = "cohort start", + riskWindowEnd = 365, + minTimeAtRisk = 1 + ) + plpCovarSettings <- FeatureExtraction::createDefaultCovariateSettings() - # Try to unlock and expect error - expect_error(unlockKeyring(keyring = keyringName)) -}) + modelDesignList <- list() + for (i in 1:length(exposureOfInterestIds)) { + for (j in 1:length(outcomeOfInterestIds)) { + modelDesignList <- append( + modelDesignList, + list( + makeModelDesignSettings( + targetId = exposureOfInterestIds[i], + outcomeId = outcomeOfInterestIds[j], + popSettings = plpPopulationSettings, + covarSettings = plpCovarSettings + ) + ) + ) + } + } -test_that("Store and retrieve connection details", { - # Setup keyring for the test - 
Sys.setenv("STRATEGUS_KEYRING_PASSWORD" = keyringPassword) - createKeyringForUnitTest(selectedKeyring = keyringName, selectedKeyringPassword = keyringPassword) - on.exit(deleteKeyringForUnitTest()) + plpModuleSpecifications <- plpModuleSettingsCreator$createModuleSpecifications( + modelDesignList = modelDesignList + ) - for (i in 1:length(connectionDetailsList)) { - connectionDetails <- connectionDetailsList[[i]]$connectionDetails - dbms <- connectionDetailsList[[i]]$connectionDetails$dbms + # SelfControlledCaseSeries ------------------------------- + sccsModuleSettingsCreator <- SelfControlledCaseSeriesModule$new() - message("************* Store connection details for ", dbms, " *************") + exposuresOutcomeList <- list() + for (exposureOfInterestId in exposureOfInterestIds) { + for (outcomeOfInterestId in outcomeOfInterestIds) { + exposuresOutcomeList[[length(exposuresOutcomeList) + 1]] <- SelfControlledCaseSeries::createExposuresOutcome( + outcomeId = outcomeOfInterestId, + exposures = list(SelfControlledCaseSeries::createExposure(exposureId = exposureOfInterestId)) + ) + } + for (negativeControlOutcomeId in negativeControlOutcomeIds) { + exposuresOutcomeList[[length(exposuresOutcomeList) + 1]] <- SelfControlledCaseSeries::createExposuresOutcome( + outcomeId = negativeControlOutcomeId, + exposures = list(SelfControlledCaseSeries::createExposure(exposureId = exposureOfInterestId, trueEffectSize = 1)) + ) + } + } - # Verify that the connection details are valid - # by connecting to the DB - conn <- DatabaseConnector::connect( - connectionDetails - ) - DatabaseConnector::disconnect(conn) + getDbSccsDataArgs <- SelfControlledCaseSeries::createGetDbSccsDataArgs( + studyStartDate = "", + studyEndDate = "", + maxCasesPerOutcome = 1e6, + useNestingCohort = TRUE, + nestingCohortId = 1, + deleteCovariatesSmallCount = 0 + ) - # Store the connection details in keyring - storeConnectionDetails( - connectionDetails = connectionDetails, - connectionDetailsReference = 
dbms, - keyringName = keyringName - ) + createStudyPopulation6AndOlderArgs <- SelfControlledCaseSeries::createCreateStudyPopulationArgs( + minAge = 18, + naivePeriod = 365 + ) - connectionDetailsFromKeyring <- retrieveConnectionDetails( - connectionDetailsReference = dbms, - keyringName = keyringName - ) + covarPreExp <- SelfControlledCaseSeries::createEraCovariateSettings( + label = "Pre-exposure", + includeEraIds = "exposureId", + start = -30, + end = -1, + endAnchor = "era start" + ) + + covarExposureOfInt <- SelfControlledCaseSeries::createEraCovariateSettings( + label = "Main", + includeEraIds = "exposureId", + start = 0, + startAnchor = "era start", + end = 0, + endAnchor = "era end", + profileLikelihood = TRUE, + exposureOfInterest = TRUE + ) + + calendarTimeSettings <- SelfControlledCaseSeries::createCalendarTimeCovariateSettings( + calendarTimeKnots = 5, + allowRegularization = TRUE, + computeConfidenceIntervals = FALSE + ) + + seasonalitySettings <- SelfControlledCaseSeries::createSeasonalityCovariateSettings( + seasonKnots = 5, + allowRegularization = TRUE, + computeConfidenceIntervals = FALSE + ) + + createSccsIntervalDataArgs <- SelfControlledCaseSeries::createCreateSccsIntervalDataArgs( + eraCovariateSettings = list(covarPreExp, covarExposureOfInt), + seasonalityCovariateSettings = seasonalitySettings, + calendarTimeCovariateSettings = calendarTimeSettings, + minCasesForTimeCovariates = 100000 + ) - # Verify that the connection details retrieved - # allow for a connection to the DB - connFromKeyring <- DatabaseConnector::connect( - connectionDetailsFromKeyring + fitSccsModelArgs <- SelfControlledCaseSeries::createFitSccsModelArgs( + control = Cyclops::createControl( + cvType = "auto", + selectorType = "byPid", + startingVariance = 0.1, + seed = 1, + resetCoefficients = TRUE, + noiseLevel = "quiet" ) - expect_silent(DatabaseConnector::disconnect(connFromKeyring)) - } + ) + + sccsAnalysis1 <- SelfControlledCaseSeries::createSccsAnalysis( + analysisId = 
1, + description = "SCCS age 18-", + getDbSccsDataArgs = getDbSccsDataArgs, + createStudyPopulationArgs = createStudyPopulation6AndOlderArgs, + createIntervalDataArgs = createSccsIntervalDataArgs, + fitSccsModelArgs = fitSccsModelArgs + ) + + sccsAnalysisList <- list(sccsAnalysis1) + + sccsModuleSpecifications <- sccsModuleSettingsCreator$createModuleSpecifications( + sccsAnalysisList = sccsAnalysisList, + exposuresOutcomeList = exposuresOutcomeList, + combineDataFetchAcrossOutcomes = FALSE + ) + + + # Create analysis specifications --------------- + analysisSpecifications <- createEmptyAnalysisSpecificiations() |> + addSharedResources(cohortSharedResourcesSpecifications) |> + addSharedResources(ncoCohortSharedResourceSpecifications) |> + addCharacterizationModuleSpecifications(cModuleSpecifications) |> + addCohortDiagnosticsModuleSpecifications(cdModuleSpecifications) |> + addCohortGeneratorModuleSpecifications(cgModuleSpecifications) |> + addCohortIncidenceModuleSpecifications(ciModuleSpecifications) |> + addCohortMethodeModuleSpecifications(cmModuleSpecifications) |> + addEvidenceSynthesisModuleSpecifications(evidenceSynthesisAnalysisSpecifications) |> + addSelfControlledCaseSeriesModuleSpecifications(sccsModuleSpecifications) |> + addPatientLevelPredictionModuleSpecifications(plpModuleSpecifications) + + expect_equal(length(analysisSpecifications$sharedResources), 2) + expect_equal(length(analysisSpecifications$moduleSpecifications), 8) }) -test_that("Retrieve connection details that do not exists throws informative error", { - # Setup keyring for the test - Sys.setenv("STRATEGUS_KEYRING_PASSWORD" = keyringPassword) - createKeyringForUnitTest(selectedKeyring = keyringName, selectedKeyringPassword = keyringPassword) - on.exit(deleteKeyringForUnitTest()) - expect_error(retrieveConnectionDetails(connectionDetailsReference = "does-not-exist", keyringName = keyringName)) +test_that("Create results execution settings", { + executionSettings <- 
createResultsExecutionSettings( + resultsDatabaseSchema = "test", + workFolder = tempfile(), + resultsFolder = tempfile(), + minCellCount = 5 + ) + + expect_equal(class(executionSettings), c("ResultsExecutionSettings", "ExecutionSettings")) +}) + +test_that("Create results data model settings", { + settings <- createResultsDataModelSettings( + resultsDatabaseSchema = "test", + resultsFolder = tempfile() + ) + + expect_equal(class(settings), c("ResultsDataModelSettings")) }) diff --git a/tests/testthat/test-Strategus.R b/tests/testthat/test-Strategus.R deleted file mode 100644 index fbd25d33..00000000 --- a/tests/testthat/test-Strategus.R +++ /dev/null @@ -1,147 +0,0 @@ -test_that("Run unit test study", { - # NOTE: Need to set this in each test otherwise it goes out of scope - renv:::renv_scope_envvars(RENV_PATHS_CACHE = renvCachePath) - - # Setup keyring for the test - Sys.setenv("STRATEGUS_KEYRING_PASSWORD" = keyringPassword) - createKeyringForUnitTest(selectedKeyring = keyringName, selectedKeyringPassword = keyringPassword) - on.exit(deleteKeyringForUnitTest()) - - analysisSpecifications <- ParallelLogger::loadSettingsFromJson( - fileName = system.file("testdata/unitTestAnalysisSpecification.json", - package = "Strategus" - ) - ) - - withr::defer( - { - if (usingTempDir) { - unlink(file.path(tempDir, "EunomiaTestStudy"), recursive = TRUE, force = TRUE) - } - }, - testthat::teardown_env() - ) - - for (i in 1:length(connectionDetailsList)) { - connectionDetails <- connectionDetailsList[[i]]$connectionDetails - dbms <- connectionDetailsList[[i]]$connectionDetails$dbms - workDatabaseSchema <- connectionDetailsList[[i]]$workDatabaseSchema - cdmDatabaseSchema <- connectionDetailsList[[i]]$cdmDatabaseSchema - tempEmulationSchema <- connectionDetailsList[[i]]$tempEmulationSchema - cohortTableNames <- connectionDetailsList[[i]]$cohortTableNames - studyRootFolder <- file.path(tempDir, "EunomiaTestStudy", dbms) - workFolder <- file.path(studyRootFolder, "work_folder") - 
resultsFolder <- file.path(studyRootFolder, "results_folder") - scriptFolder <- file.path(studyRootFolder, "script_folder") - - message("************* Running Strategus on ", dbms, " *************") - - # Using a named and secured keyring - Strategus::storeConnectionDetails( - connectionDetails = connectionDetails, - connectionDetailsReference = dbms, - keyringName = keyringName - ) - - resultsConnectionDetailsReference <- NULL - resultsDatabaseSchema <- NULL - - # Only run this code if we're testing on SQLite - if (dbms == "sqlite") { - resultsConnectionDetailsReference <- "result-store" - resultsDatabaseSchema <- "main" - Strategus::storeConnectionDetails( - connectionDetails, - resultsConnectionDetailsReference, - keyringName = keyringName - ) - resultsExecutionSettings <- Strategus::createResultsExecutionSettings( - resultsConnectionDetailsReference = resultsConnectionDetailsReference, - resultsDatabaseSchema = resultsDatabaseSchema, - workFolder = workFolder, - resultsFolder = resultsFolder - ) - Strategus::createResultDataModels( - analysisSpecifications = analysisSpecifications, - executionSettings = resultsExecutionSettings, - keyringName = keyringName - ) - } - - executionSettings <- createCdmExecutionSettings( - connectionDetailsReference = dbms, - workDatabaseSchema = workDatabaseSchema, - cdmDatabaseSchema = cdmDatabaseSchema, - tempEmulationSchema = tempEmulationSchema, - cohortTableNames = cohortTableNames, - workFolder = workFolder, - resultsFolder = resultsFolder, - minCellCount = 5, - resultsDatabaseSchema = resultsDatabaseSchema, - resultsConnectionDetailsReference = resultsConnectionDetailsReference - ) - - if (!dir.exists(studyRootFolder)) { - dir.create(studyRootFolder, recursive = TRUE) - } - ParallelLogger::saveSettingsToJson( - object = executionSettings, - file.path(studyRootFolder, "eunomiaExecutionSettings.json") - ) - - executionSettings <- ParallelLogger::loadSettingsFromJson( - fileName = file.path(studyRootFolder, 
"eunomiaExecutionSettings.json") - ) - - Strategus::execute( - analysisSpecifications = analysisSpecifications, - executionSettings = executionSettings, - executionScriptFolder = scriptFolder, - keyringName = keyringName - ) - - expect_true(file.exists(file.path(resultsFolder, "TestModule1_1", "done"))) - } -}) - -test_that("Execute on Oracle stops if table names exceed length limit", { - sqlRenderTempEmulationSchema <- getOption("sqlRenderTempEmulationSchema", default = "") - options(sqlRenderTempEmulationSchema = "some_schema") - on.exit(options(sqlRenderTempEmulationSchema = sqlRenderTempEmulationSchema)) - - Sys.setenv("STRATEGUS_KEYRING_PASSWORD" = keyringPassword) - createKeyringForUnitTest(selectedKeyring = keyringName, selectedKeyringPassword = keyringPassword) - on.exit(deleteKeyringForUnitTest()) - - connectionDetails <- DatabaseConnector::createConnectionDetails( - dbms = "oracle" - ) - Strategus::storeConnectionDetails( - connectionDetails = connectionDetails, - connectionDetailsReference = "oracle-test", - keyringName = keyringName - ) - executionSettings <- Strategus::createCdmExecutionSettings( - connectionDetailsReference = "oracle-test", - workDatabaseSchema = "does_not_matter", - cdmDatabaseSchema = "does_not_matter", - cohortTableNames = CohortGenerator::getCohortTableNames("some_really_long_table_name_for_testing_that_oracle_throws_a_warning"), - workFolder = file.path(tempDir, "work_folder"), - resultsFolder = file.path(tempDir, "results_folder"), - minCellCount = 5 - ) - - analysisSpecifications <- ParallelLogger::loadSettingsFromJson( - fileName = system.file("testdata/unitTestAnalysisSpecification.json", - package = "Strategus" - ) - ) - - expect_error( - Strategus::execute( - analysisSpecifications = analysisSpecifications, - executionSettings = executionSettings, - keyringName = keyringName - ) - ) -}) diff --git a/vignettes/CreatingAnalysisSpecification.Rmd b/vignettes/CreatingAnalysisSpecification.Rmd deleted file mode 100644 index 
5ad08532..00000000 --- a/vignettes/CreatingAnalysisSpecification.Rmd +++ /dev/null @@ -1,487 +0,0 @@ ---- -title: "Creating Analysis Specification" -author: "Anthony G. Sena" -date: "`r Sys.Date()`" -output: - pdf_document: - number_sections: yes - toc: yes - html_document: - number_sections: yes - toc: yes -params: - analysisSettingsPath: "D:/git/OHDSI/Strategus/inst/testdata" - analysisSettingsFileName: "analysisSpecification.json" -vignette: > - %\VignetteEngine{knitr::rmarkdown} - %\VignetteIndexEntry{Creating Analysis Specification} - %\VignetteEncoding{UTF-8} ---- - -```{r setup, include=FALSE, echo=FALSE, warning=FALSE} -library(Strategus) -library(dplyr) -library(knitr) -library(ParallelLogger) - -# # Work-around for this error: -# # Error in globalCallingHandlers(condition = conditionHandler) : -# # should not be called with handlers on the stack -options(loggerSettings = ParallelLogger:::getDefaultLoggerSettings()) -options(width = 200) -``` - - -# Creating an analysis specification - -This walk through will show how to use `Strategus` to define an analysis specification on an example study using cohorts from the example problem _What is the risk of gastrointestinal (GI) bleed in new users of celecoxib compared to new users of diclofenac?_ as described in the [Book Of OHDSI Chapter 12 on Population Level Estimation]( https://ohdsi.github.io/TheBookOfOhdsi/PopulationLevelEstimation.html#PopulationLevelEstimation) - -## Setting up your R environment - -To start, we must install the [HADES](https://ohdsi.github.io/Hades/) libraries in order to create the settings for the analysis specification. The following script will install the necessary HADES packages for this vignette. 
- -```{r, eval=FALSE} -# Install correct versions of HADES packages -remotes::install_github("ohdsi/CohortGenerator", ref = "v0.9.0") -remotes::install_github("ohdsi/CohortDiagnostics", ref = "v3.2.5") -remotes::install_github("ohdsi/Characterization", ref = "v0.2.0") -remotes::install_github("ohdsi/CohortIncidence", ref = "v3.3.0") -remotes::install_github("ohdsi/CohortMethod", ref = "v5.3.0") -remotes::install_github("ohdsi/SelfControlledCaseSeries", ref = "v5.2.0") -remotes::install_github("ohdsi/PatientLevelPrediction", ref = "v6.3.6") -``` - -## Cohorts for the study - -To start, we'll need to define cohorts and negative control outcomes to use in our example analysis specification. We've included the cohorts and negative control outcomes in the `Strategus` package for this example and the code below will load them for use when assembling the analysis specification. - -```{r, results='hide', eval=FALSE} -library(CohortGenerator) -cohortDefinitionSet <- getCohortDefinitionSet( - settingsFileName = "testdata/Cohorts.csv", - jsonFolder = "testdata/cohorts", - sqlFolder = "testdata/sql", - packageName = "Strategus" -) -ncoCohortSet <- readCsv(file = system.file("testdata/negative_controls_concept_set.csv", - package = "Strategus" -)) -``` - -### Cohort Definitions & Negative Control Outcomes - -This is the list of cohort definitions we will use when assembling the analysis specification for Strategus. - -```{r eval=FALSE} -kable(cohortDefinitionSet[, c("cohortId", "cohortName")]) -``` - -And the negative control outcomes when performing empirical calibration. - -```{r eval=FALSE} -kable(ncoCohortSet) -``` - - -# Assembling HADES modules - -The building blocks of the `Strategus` analysis specification are HADES modules. For purposes of this walk through, a module is a specific analytic task you would like to perform. As shown in the subsequent sections, the high-level pattern for using a module consists of: - -1. Download the module's settings function. -2. 
Create the module specifications using the settings function(s) from the module -3. Compose the analysis pipeline from one or more module settings - -## CohortGenerator Module Settings - -The following code downloads the settings functions from the `CohortGeneratorModule` which then activates some additional functions we can use for creating the analysis specification. In the analysis specification, we will add the cohort definitions and negative control outcomes to the `sharedResources` section since these elements may be used by any of the HADES modules. To do this, we will use the `createCohortSharedResourceSpecifications` and `createNegativeControlOutcomeCohortSharedResourceSpecifications` functions respectively. In addition, we will use the `cohortGeneratorModuleSpecifications` function to specify the cohort generation settings. - -```{r eval=FALSE} -source("https://raw.githubusercontent.com/OHDSI/CohortGeneratorModule/v0.4.1/SettingsFunctions.R") - -# Create the cohort definition shared resource element for the analysis specification -cohortDefinitionSharedResource <- createCohortSharedResourceSpecifications( - cohortDefinitionSet = cohortDefinitionSet -) - -# Create the negative control outcome shared resource element for the analysis specification -ncoSharedResource <- createNegativeControlOutcomeCohortSharedResourceSpecifications( - negativeControlOutcomeCohortSet = ncoCohortSet, - occurrenceType = "all", - detectOnDescendants = TRUE -) - -# Create the module specification -cohortGeneratorModuleSpecifications <- createCohortGeneratorModuleSpecifications( - incremental = TRUE, - generateStats = TRUE -) -``` - -## CohortDiagnostics Module Settings - -The following code creates the `cohortDiagnosticsModuleSpecifications` to run cohort diagnostics on the cohorts in the study. 
- -```{r eval=FALSE} -source("https://raw.githubusercontent.com/OHDSI/CohortDiagnosticsModule/v0.2.0/SettingsFunctions.R") -cohortDiagnosticsModuleSpecifications <- createCohortDiagnosticsModuleSpecifications( - runInclusionStatistics = TRUE, - runIncludedSourceConcepts = TRUE, - runOrphanConcepts = TRUE, - runTimeSeries = FALSE, - runVisitContext = TRUE, - runBreakdownIndexEvents = TRUE, - runIncidenceRate = TRUE, - runCohortRelationship = TRUE, - runTemporalCohortCharacterization = TRUE, - incremental = FALSE -) -``` - -## CohortIncidence Module Settings - -The following code creates the `cohortIncidenceModuleSpecifications` to perform an incidence rate analysis for the target cohorts and outcome in this study. - -```{r eval=FALSE} -source("https://raw.githubusercontent.com/OHDSI/CohortIncidenceModule/v0.4.1/SettingsFunctions.R") -library(CohortIncidence) -targets <- list( - createCohortRef(id = 1, name = "Celecoxib"), - createCohortRef(id = 2, name = "Diclofenac"), - createCohortRef(id = 4, name = "Celecoxib Age >= 30"), - createCohortRef(id = 5, name = "Diclofenac Age >= 30") -) -outcomes <- list(createOutcomeDef(id = 1, name = "GI bleed", cohortId = 3, cleanWindow = 9999)) - -tars <- list( - createTimeAtRiskDef(id = 1, startWith = "start", endWith = "end"), - createTimeAtRiskDef(id = 2, startWith = "start", endWith = "start", endOffset = 365) -) -analysis1 <- createIncidenceAnalysis( - targets = c(1, 2, 4, 5), - outcomes = c(1), - tars = c(1, 2) -) - -irDesign <- createIncidenceDesign( - targetDefs = targets, - outcomeDefs = outcomes, - tars = tars, - analysisList = list(analysis1), - strataSettings = createStrataSettings( - byYear = TRUE, - byGender = TRUE - ) -) - -cohortIncidenceModuleSpecifications <- createCohortIncidenceModuleSpecifications( - irDesign = irDesign$toList() -) -``` - -## Characterization Module Settings - -The following code creates the `characterizationModuleSpecifications` to perform an characterization analysis for the target cohorts 
and outcome in this study. - - -```{r eval=FALSE} -source("https://raw.githubusercontent.com/OHDSI/CharacterizationModule/v0.6.0/SettingsFunctions.R") -characterizationModuleSpecifications <- createCharacterizationModuleSpecifications( - targetIds = c(1, 2), - outcomeIds = 3, - covariateSettings = FeatureExtraction::createDefaultCovariateSettings(), - dechallengeStopInterval = 30, - dechallengeEvaluationWindow = 30, - timeAtRisk = data.frame( - riskWindowStart = c(1, 1), - startAnchor = c("cohort start", "cohort start"), - riskWindowEnd = c(0, 365), - endAnchor = c("cohort end", "cohort end") - ) -) -``` - - -## CohortMethod Module Settings - -The following code creates the `cohortMethodModuleSpecifications` to perform a comparative cohort analysis for this study. - -```{r eval=FALSE} -library(CohortMethod) -source("https://raw.githubusercontent.com/OHDSI/CohortMethodModule/v0.3.1/SettingsFunctions.R") -negativeControlOutcomes <- lapply( - X = ncoCohortSet$cohortId, - FUN = createOutcome, - outcomeOfInterest = FALSE, - trueEffectSize = 1, - priorOutcomeLookback = 30 -) - -outcomesOfInterest <- lapply( - X = 3, - FUN = createOutcome, - outcomeOfInterest = TRUE -) - -outcomes <- append( - negativeControlOutcomes, - outcomesOfInterest -) - -tcos1 <- CohortMethod::createTargetComparatorOutcomes( - targetId = 1, - comparatorId = 2, - outcomes = outcomes, - excludedCovariateConceptIds = c(1118084, 1124300) -) -tcos2 <- CohortMethod::createTargetComparatorOutcomes( - targetId = 4, - comparatorId = 5, - outcomes = outcomes, - excludedCovariateConceptIds = c(1118084, 1124300) -) - -targetComparatorOutcomesList <- list(tcos1, tcos2) - -covarSettings <- FeatureExtraction::createDefaultCovariateSettings(addDescendantsToExclude = TRUE) - -getDbCmDataArgs <- CohortMethod::createGetDbCohortMethodDataArgs( - washoutPeriod = 183, - firstExposureOnly = TRUE, - removeDuplicateSubjects = "remove all", - maxCohortSize = 100000, - covariateSettings = covarSettings -) - 
-createStudyPopArgs <- CohortMethod::createCreateStudyPopulationArgs( - minDaysAtRisk = 1, - riskWindowStart = 0, - startAnchor = "cohort start", - riskWindowEnd = 30, - endAnchor = "cohort end" -) - -matchOnPsArgs <- CohortMethod::createMatchOnPsArgs() -fitOutcomeModelArgs <- CohortMethod::createFitOutcomeModelArgs(modelType = "cox") -createPsArgs <- CohortMethod::createCreatePsArgs( - stopOnError = FALSE, - control = Cyclops::createControl(cvRepetitions = 1) -) -computeSharedCovBalArgs <- CohortMethod::createComputeCovariateBalanceArgs() -computeCovBalArgs <- CohortMethod::createComputeCovariateBalanceArgs( - covariateFilter = FeatureExtraction::getDefaultTable1Specifications() -) - -cmAnalysis1 <- CohortMethod::createCmAnalysis( - analysisId = 1, - description = "No matching, simple outcome model", - getDbCohortMethodDataArgs = getDbCmDataArgs, - createStudyPopArgs = createStudyPopArgs, - fitOutcomeModelArgs = fitOutcomeModelArgs -) - -cmAnalysis2 <- CohortMethod::createCmAnalysis( - analysisId = 2, - description = "Matching on ps and covariates, simple outcomeModel", - getDbCohortMethodDataArgs = getDbCmDataArgs, - createStudyPopArgs = createStudyPopArgs, - createPsArgs = createPsArgs, - matchOnPsArgs = matchOnPsArgs, - computeSharedCovariateBalanceArgs = computeSharedCovBalArgs, - computeCovariateBalanceArgs = computeCovBalArgs, - fitOutcomeModelArgs = fitOutcomeModelArgs -) - -cmAnalysisList <- list(cmAnalysis1, cmAnalysis2) - -analysesToExclude <- NULL - - -cohortMethodModuleSpecifications <- createCohortMethodModuleSpecifications( - cmAnalysisList = cmAnalysisList, - targetComparatorOutcomesList = targetComparatorOutcomesList, - analysesToExclude = analysesToExclude -) -``` - - -## SelfControlledCaseSeries Module Settings - -The following code creates the `cohortMethodModuleSpecifications` to perform a comparative cohort analysis for this study. 
- -```{r eval=FALSE} -library(SelfControlledCaseSeries) -source("https://raw.githubusercontent.com/OHDSI/SelfControlledCaseSeriesModule/v0.5.0/SettingsFunctions.R") - -# Exposures-outcomes ----------------------------------------------------------- -negativeControlOutcomeIds <- ncoCohortSet$cohortId -outcomeOfInterestIds <- c(3) -exposureOfInterestIds <- c(1, 2) - -exposuresOutcomeList <- list() -for (exposureOfInterestId in exposureOfInterestIds) { - for (outcomeOfInterestId in outcomeOfInterestIds) { - exposuresOutcomeList[[length(exposuresOutcomeList) + 1]] <- createExposuresOutcome( - outcomeId = outcomeOfInterestId, - exposures = list(createExposure(exposureId = exposureOfInterestId)) - ) - } - for (negativeControlOutcomeId in negativeControlOutcomeIds) { - exposuresOutcomeList[[length(exposuresOutcomeList) + 1]] <- createExposuresOutcome( - outcomeId = negativeControlOutcomeId, - exposures = list(createExposure(exposureId = exposureOfInterestId, trueEffectSize = 1)) - ) - } -} - -# Analysis settings ------------------------------------------------------------ -getDbSccsDataArgs <- SelfControlledCaseSeries::createGetDbSccsDataArgs( - studyStartDate = "", - studyEndDate = "", - maxCasesPerOutcome = 1e6, - useNestingCohort = TRUE, - nestingCohortId = 1, - deleteCovariatesSmallCount = 0 -) - -createStudyPopulation6AndOlderArgs <- SelfControlledCaseSeries::createCreateStudyPopulationArgs( - minAge = 18, - naivePeriod = 365 -) - -covarPreExp <- SelfControlledCaseSeries::createEraCovariateSettings( - label = "Pre-exposure", - includeEraIds = "exposureId", - start = -30, - end = -1, - endAnchor = "era start" -) - -covarExposureOfInt <- SelfControlledCaseSeries::createEraCovariateSettings( - label = "Main", - includeEraIds = "exposureId", - start = 0, - startAnchor = "era start", - end = 0, - endAnchor = "era end", - profileLikelihood = TRUE, - exposureOfInterest = TRUE -) - -calendarTimeSettings <- SelfControlledCaseSeries::createCalendarTimeCovariateSettings( - 
calendarTimeKnots = 5, - allowRegularization = TRUE, - computeConfidenceIntervals = FALSE -) - -seasonalitySettings <- SelfControlledCaseSeries::createSeasonalityCovariateSettings( - seasonKnots = 5, - allowRegularization = TRUE, - computeConfidenceIntervals = FALSE -) - -createSccsIntervalDataArgs <- SelfControlledCaseSeries::createCreateSccsIntervalDataArgs( - eraCovariateSettings = list(covarPreExp, covarExposureOfInt), - seasonalityCovariateSettings = seasonalitySettings, - calendarTimeCovariateSettings = calendarTimeSettings, - minCasesForTimeCovariates = 100000 -) - -fitSccsModelArgs <- SelfControlledCaseSeries::createFitSccsModelArgs( - control = Cyclops::createControl( - cvType = "auto", - selectorType = "byPid", - startingVariance = 0.1, - seed = 1, - resetCoefficients = TRUE, - noiseLevel = "quiet" - ) -) - -sccsAnalysis1 <- SelfControlledCaseSeries::createSccsAnalysis( - analysisId = 1, - description = "SCCS age 18-", - getDbSccsDataArgs = getDbSccsDataArgs, - createStudyPopulationArgs = createStudyPopulation6AndOlderArgs, - createIntervalDataArgs = createSccsIntervalDataArgs, - fitSccsModelArgs = fitSccsModelArgs -) - -sccsAnalysisList <- list(sccsAnalysis1) - -# SCCS module specs ------------------------------------------------------------ -sccsModuleSpecifications <- creatSelfControlledCaseSeriesModuleSpecifications( - sccsAnalysisList = sccsAnalysisList, - exposuresOutcomeList = exposuresOutcomeList, - combineDataFetchAcrossOutcomes = FALSE -) -``` - -## PatientLevelPrediction Module Settings - -The following code creates the `plpModuleSpecifications` to perform a self-controlled case series analysis for this study. 
- -```{r eval=FALSE} -source("https://raw.githubusercontent.com/OHDSI/PatientLevelPredictionModule/v0.3.0/SettingsFunctions.R") - -makeModelDesignSettings <- function(targetId, outcomeId, popSettings, covarSettings) { - invisible(PatientLevelPrediction::createModelDesign( - targetId = targetId, - outcomeId = outcomeId, - restrictPlpDataSettings = PatientLevelPrediction::createRestrictPlpDataSettings(), - populationSettings = popSettings, - covariateSettings = covarSettings, - preprocessSettings = PatientLevelPrediction::createPreprocessSettings(), - modelSettings = PatientLevelPrediction::setLassoLogisticRegression(), - splitSettings = PatientLevelPrediction::createDefaultSplitSetting(), - runCovariateSummary = T - )) -} - -plpPopulationSettings <- PatientLevelPrediction::createStudyPopulationSettings( - startAnchor = "cohort start", - riskWindowStart = 1, - endAnchor = "cohort start", - riskWindowEnd = 365, - minTimeAtRisk = 1 -) -plpCovarSettings <- FeatureExtraction::createDefaultCovariateSettings() - -modelDesignList <- list() -for (i in 1:length(exposureOfInterestIds)) { - for (j in 1:length(outcomeOfInterestIds)) { - modelDesignList <- append( - modelDesignList, - list( - makeModelDesignSettings( - targetId = exposureOfInterestIds[i], - outcomeId = outcomeOfInterestIds[j], - popSettings = plpPopulationSettings, - covarSettings = plpCovarSettings - ) - ) - ) - } -} - -plpModuleSpecifications <- createPatientLevelPredictionModuleSpecifications(modelDesignList = modelDesignList) -``` - -# Strategus Analysis Specifications - -Finally, we will use the various shared resources and module specifications to construct the full set of analysis specifications and save it to the file system in JSON format. 
- -```{r eval=FALSE} -analysisSpecifications <- createEmptyAnalysisSpecificiations() %>% - addSharedResources(cohortDefinitionSharedResource) %>% - addSharedResources(ncoSharedResource) %>% - addModuleSpecifications(cohortGeneratorModuleSpecifications) %>% - addModuleSpecifications(cohortDiagnosticsModuleSpecifications) %>% - addModuleSpecifications(cohortIncidenceModuleSpecifications) %>% - addModuleSpecifications(characterizationModuleSpecifications) %>% - addModuleSpecifications(cohortMethodModuleSpecifications) %>% - addModuleSpecifications(sccsModuleSpecifications) %>% - addModuleSpecifications(plpModuleSpecifications) - -ParallelLogger::saveSettingsToJson(analysisSpecifications, file.path(params$analysisSettingsPath, params$analysisSettingsFileName)) -``` - diff --git a/vignettes/CreatingModules.Rmd b/vignettes/CreatingModules.Rmd deleted file mode 100644 index ce93dabc..00000000 --- a/vignettes/CreatingModules.Rmd +++ /dev/null @@ -1,139 +0,0 @@ ---- -title: "Creating Strategus Modules" -author: "Anthony G. Sena" -date: "`r Sys.Date()`" -output: - pdf_document: - number_sections: yes - toc: yes - html_document: - number_sections: yes - toc: yes -vignette: > - %\VignetteEngine{knitr::rmarkdown} - %\VignetteIndexEntry{Creating Strategus Module} - %\VignetteEncoding{UTF-8} ---- - -```{r setup, include=FALSE} -knitr::opts_chunk$set(echo = TRUE) -skeletonFolder <- ifelse(basename(getwd()) == "vignettes", - "module_skeleton", - file.path("vignettes", "module_skeleton") -) -``` - -# Background - -This document aims to document the steps necessary to create analytic module that is compatible with [Strategus](https://github.com/OHDSI/Strategus). Please treat this document as a **work in progress** as Strategus is still under development. - - -# Getting Started - -A Strategus analytic module is an R Project that uses [renv](https://rstudio.github.io/renv/index.html). 
**NOTE**: Please make sure you are using renv > 1.0.0 when creating a new analytic module to make sure it is compatible with Strategus. - -A Strategus module will contain the following files: - -```{r comment='', echo=FALSE} -fs::dir_tree(skeletonFolder, recurse = TRUE) -``` -Here we will detail how each file is used by Strategus and what is required in the contents of the file. - -## Creating YourProjectModule.Rproj and activating `renv` - -This is the R Project (.Rproj) file for your module and should end in "Module". You may create this as a standard R Project via RStudio. Once the project is created, please use `renv::init()` to set up the `renv` folder as shown above. This will create the necessary `.RProfile` in the root of your project and the `renv` subfolder with the necessary R code for `renv`'s operations. - -## README.md - -This is a standard README markdown file that describes the module. - -## NEWS.md - -This is a standard NEWS markdown file that is a change log for your module. See [this post](https://blog.r-hub.io/2020/05/08/pkg-news/) for more information. - -## MetaData.json - -MetaData.json holds information that describes the module and its dependencies: - -```{r comment='', echo=FALSE} -cat(readLines(file.path(skeletonFolder, "MetaData.json")), sep = "\n") -``` -To detail the contents of the JSON file: - -- **Name**: The name of the module -- **Version**: The version of the module. This should have a corresponding git tag in the repository when the module is released otherwise Strategus will not be able to download it. -- **Dependencies**: A list of modules that are required to have successfully executed **BEFORE** this module is executed. If there are no dependencies, leave this as an empty array `[]`. -- **TablePrefix**: The prefix to use when creating the results tables in the `resultsDataModelSpecification.csv`. Please see [Main.R] for more information on how this value is used. 
- -## Main.R - -This file holds the main executable for your module. This file must contain a function called `execute(jobContext)`. - -```{r comment='', echo=FALSE} -cat(readLines(file.path(skeletonFolder, "Main.R")), sep = "\n") -``` - -As shown in the code above, your `execute(jobContext)` should handle: validating the `jobContext` object to ensure it has all of the information necessary for your code to function, a section to execute the analytics and finally code to assemble the output. Here we will describe the requirements for the way in which your module must output its results: - -- A single .ZIP file is created that holds all of the result files as described below. -- Output files are required to be in .CSV format. Use CohortGenerator v0.5.0 or higher which contains a helper function for `writeCsv()` which will ensure your output is formatted properly. For more information, please see: https://ohdsi.github.io/CohortGenerator/reference/writeCsv.html. **IMPORTANT:** File names _must_ correspond to the table names that are specified in the `resultsModuleSpecification.csv`. -- You must include a file named `resultsModuleSpecification.csv` in your output directory. The format of this file is as follows: - -```{r comment='', echo=FALSE} -cat(readLines(file.path(skeletonFolder, "resources", "resultsDataModelSpecification.csv")), sep = "\n") -``` -The `resultsModuleSpecification.csv` has the following columns: - -- **table_name**: The table name to use to hold the data. -- **column_name**: The column name in the table. -- **data_type**: The data type for the column. See https://www.postgresql.org/docs/current/datatype.html for examples. -- **is_required**: Will this column allow for NULL values? Yes/No -- **primary_key**: Is this column part of the table's primary key? Yes/No - -## SettingsFunctions.R - -This file contains one or more functions required to create the module settings for use in Strategus. 
We plan to later remove this requirement when we can describe the module specification using the [OpenAPI 3.0 Specification](https://swagger.io/specification/). For now, your module should contain a function similar to the following: - -```{r comment='', echo=FALSE} -cat(readLines(file.path(skeletonFolder, "SettingsFunctions.R")), sep = "\n") -``` - -As shown above, this example comes from the `CohortGeneratorModule` and the function name reflects the fact that the function will create the settings used to dictate the behavior of the module. The parameters of the function will differ based on the requirements of your module - if there are choices to be made when running your module, you should include these as parameters to your module specification function. - -Internal to the function above, the formal parameters to the function are used to construct a `list()` named `analysis` which holds the analysis settings. Next the `MetaData.json` file is used to obtain the module name/version for inclusion in the `specifications` list. The `specifications` list contains the `remoteRepo` and `remoteUsername` properties to indicate where your module is stored on GitHub. Finally, we set the `class()` of the `specifications` object to `c("CohortGeneratorModuleSpecifications", "ModuleSpecifications")`. For your module, you will want to substitute `"CohortGeneratorModuleSpecifications"` for the name of your module and retain the `"ModuleSpecifications"` in the vector. - -The following JSON fragment shows how the output of `createCohortGeneratorModuleSpecifications()` is used in the `moduleSpecifications` section of the overall analysis settings JSON for Strategus: - -```{r comment='', echo=FALSE} -cat(readLines(file.path(skeletonFolder, "resources", "exampleAnalysisSpecifications.json")), sep = "\n") -``` - -## renv.lock - -Each module will make use of [renv](https://rstudio.github.io/renv/articles/renv.html) to capture its R package dependencies. 
Furthermore, Strategus will make use of the `renv` settings in your module to create a run-time environment when executing your module to ensure all of the necessary dependencies are available to your module. - -It is recommended to use the HADES-wide renv.lock file which is available at https://github.com/OHDSI/Hades/blob/main/hadesWideReleases. Find the most recent release based on the folder name and copy the `renv.lock` file into the root of your module project. - -If you need to install additional dependencies for your project, use `renv::record()` to record it in the lock file. - -# Extra files - -It is advisable to add an `extras` folder to your project to include other useful files for managing and testing your module. We'll detail those files here: - -## ModuleMaintenance.R - -This file is used to store utility functions for your module, such as the code mentioned earlier for generating the [renv.lock] file. Here is an example of the contents of ModuleMaintenance.R: - -```{r comment='', echo=FALSE} -cat(readLines(file.path(skeletonFolder, "extras", "ModuleMaintenance.R")), sep = "\n") -``` -## Test Files - -The following file is used to create a test jobContext for use in the `execute(jobContext)` as described in [Main.R]: - -https://github.com/OHDSI/CohortGeneratorModule/blob/main/extras/test/CreateJobContext.R - -And the following file is used to create a test harness for running your module: - -https://github.com/OHDSI/CohortGeneratorModule/blob/main/extras/test/TestModuleStandalone.R - diff --git a/vignettes/ExecuteStrategus.Rmd b/vignettes/ExecuteStrategus.Rmd deleted file mode 100644 index c1f5c996..00000000 --- a/vignettes/ExecuteStrategus.Rmd +++ /dev/null @@ -1,128 +0,0 @@ ---- -title: "Execute Strategus" -author: "Anthony G. 
Sena" -date: "`r Sys.Date()`" -output: - pdf_document: - number_sections: yes - toc: yes - html_document: - number_sections: yes - toc: yes -vignette: > - %\VignetteEngine{knitr::rmarkdown} - %\VignetteIndexEntry{Execute Strategus} - %\VignetteEncoding{UTF-8} -always_allow_html: true ---- - -```{r setup, include=FALSE} -knitr::opts_chunk$set(echo = TRUE) -``` - -A Strategus study is defined by *analysis specifications*. These specifications describe which modules to run, with which settings. The 'Creating Analysis Specification' vignette describes how to create analysis specifications. In this vignette, we demonstrate how to run a study once it is specified. - -# Creating execution settings - -In addition to analysis specifications, Strategus also requires *execution settings*. The execution settings specify how the study should be executed in a specific environment, for example how to connect to a database, and what local folders to use. Many Strategus studies run against data in the OMOP Common Data Model (CDM), and in this vignette we focus on this type of studies. (Other studies, such as meta-analyses, may run against results data instead). In this example, we will make use of the [Eunomia](https://github.com/ohdsi/eunomia) data set which is an OMOP CDM with simulated data used for example purposes. When running a study against your own CDM data, you will need to specify the database connection details for your environment. Execution settings for studies against the CDM can be created using `createCdmExecutionSettings()`. - -Note that Strategus uses [keyring](https://r-lib.github.io/keyring/index.html) to store the connection information securely, so that sensitive information is not stored directly in the `executionSettings` variable. - -## Storing the connection details - -In this example, we first create a `connectionDetails` reference for Eunomia. In your environment, the `connectionDetails` would be specific to your OMOP CDM connection. 
- -```{r eval=FALSE} -library(Strategus) -folder <- "c:/strategus" -``` -```{r echo=FALSE, warning=FALSE, message=FALSE} -library(Strategus) -outputFolder <- tempfile("vignetteFolder") -dir.create(outputFolder) -``` - -```{r} -# Note: specifying the `databaseFile` so the Eunomia database is permanent, not temporary: -connectionDetails <- Eunomia::getEunomiaConnectionDetails( - databaseFile = file.path(outputFolder, "cdm.sqlite") -) -``` - -Next we will use `Strategus` to store the connection details and provide a `connectionDetailsReference` that Strategus will use to look up the connection details. - -```{r eval=FALSE} -storeConnectionDetails( - connectionDetails = connectionDetails, - connectionDetailsReference = "eunomia" -) -``` -Note that we only need to store connection details once. We can re-use these details in all future studies. - -## Creating an execution settings object - -Next, we will use `Strategus` to create the CDM execution settings. The `connectionDetailsReference` refers to the connection details we stored earlier: - -```{r} -executionSettings <- createCdmExecutionSettings( - connectionDetailsReference = "eunomia", - workDatabaseSchema = "main", - cdmDatabaseSchema = "main", - cohortTableNames = CohortGenerator::getCohortTableNames(), - workFolder = file.path(outputFolder, "work_folder"), - resultsFolder = file.path(outputFolder, "results_folder"), - minCellCount = 5 -) -``` - -Finally, we can write out the execution settings to the file system to capture this information. - -```{r} -ParallelLogger::saveSettingsToJson( - object = executionSettings, - file.path(outputFolder, "eunomiaExecutionSettings.json") -) -``` - -# Executing the study - -## Specifying the instantiated modules folder - -Strategus studies use modules to perform the analytics, and each module has a version. Different studies can use the same modules, but a single study can only use one version of a particular module. 
These modules need to be instantiated before they can be executed, a sometimes time-consuming task. We must specify a global location where these modules will be instantiated so that, once a module is instantiated, it can be used in all future studies: - -```{r eval=FALSE} -Sys.setenv("INSTANTIATED_MODULES_FOLDER" = "c:/strategus/modules") -``` - -We recommend adding this environmental variable to your `.renviron` file, so it is always set. - -## Running the study - -For this study, we will use analysis specifications created elsewhere, and the execution settings we created earlier: - -```{r} -analysisSpecifications <- ParallelLogger::loadSettingsFromJson( - fileName = system.file("testdata/analysisSpecification.json", - package = "Strategus" - ) -) - -executionSettings <- ParallelLogger::loadSettingsFromJson( - fileName = file.path(outputFolder, "eunomiaExecutionSettings.json") -) -``` - -And finally we execute the study: - -```{r eval=FALSE} -execute( - analysisSpecifications = analysisSpecifications, - executionSettings = executionSettings, - executionScriptFolder = file.path(outputFolder, "script_folder") -) -``` -This will first instantiate all the modules if they haven't already been instantiated, and will then execute each module in sequence according to the analysis specifications. The results will appear in subfolders of the 'results_folder', as specified in the execution settings. 
- -```{r echo=FALSE} -unlink(outputFolder, recursive = TRUE) -``` diff --git a/vignettes/module_skeleton/.Rprofile b/vignettes/module_skeleton/.Rprofile deleted file mode 100644 index 81b960f5..00000000 --- a/vignettes/module_skeleton/.Rprofile +++ /dev/null @@ -1 +0,0 @@ -source("renv/activate.R") diff --git a/vignettes/module_skeleton/Main.R b/vignettes/module_skeleton/Main.R deleted file mode 100644 index c2a13ee5..00000000 --- a/vignettes/module_skeleton/Main.R +++ /dev/null @@ -1,16 +0,0 @@ -execute <- function(jobContext) { - # VALIDATE THE jobContext - rlang::inform("Validating inputs") - - # YOUR VALIDATION CODE GOES HERE.... - - # EXECUTE THE ANALYTICS - rlang::inform("Executing") - - # YOUR EXECUTION CODE GOES HERE.... - - # ASSEMBLE AND .ZIP THE RESULTS - rlang::inform("Export data") - - # YOUR CODE GOES HERE.... -} diff --git a/vignettes/module_skeleton/MetaData.json b/vignettes/module_skeleton/MetaData.json deleted file mode 100644 index 0460d12d..00000000 --- a/vignettes/module_skeleton/MetaData.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "Name": "CohortGeneratorModule", - "Version": "0.0.1", - "Dependencies": ["SomePrerequisiteModule"], - "TablePrefix": "cg_" -} diff --git a/vignettes/module_skeleton/NEWS.md b/vignettes/module_skeleton/NEWS.md deleted file mode 100644 index 8fd5bcf9..00000000 --- a/vignettes/module_skeleton/NEWS.md +++ /dev/null @@ -1,4 +0,0 @@ -CohortGeneratorModule 0.0.1 -======================= - -Initial version \ No newline at end of file diff --git a/vignettes/module_skeleton/README.md b/vignettes/module_skeleton/README.md deleted file mode 100644 index 33382e3f..00000000 --- a/vignettes/module_skeleton/README.md +++ /dev/null @@ -1,50 +0,0 @@ -# CohortGeneratorModule - -# Introduction - -This R project contains functionality for generating cohorts using a JSON specification that is -under development. This module design then uses CohortGenerator and other OHDSI packages to generate -the cohorts based on the specification. 
- -# Example - -Please see the code located in `extras/CodeToRunModule.R` for an example of using the module. - -# Technology - -CohortGeneratorModule is an R project. - -# System requirements - -Requires R (version 4.1.3 or higher). - -# Getting Started - -1. Make sure your R environment is properly configured. This means that Java must be installed. See [these instructions](https://ohdsi.github.io/Hades/rSetup.html) for how to configure your R environment. - -2. TODO - need to describe in context of Strategus - - -# User Documentation - -Coming Soon - -# Support - -Please use the GitHub bug tracker - -# Contributing - -Read [here](https://ohdsi.github.io/Hades/contribute.html) how you can contribute to this package. - -# License - -CohortGeneratorModule is licensed under Apache License 2.0 - -# Development - -This package is being developed in RStudio. - -### Development status - -Beta diff --git a/vignettes/module_skeleton/SettingsFunctions.R b/vignettes/module_skeleton/SettingsFunctions.R deleted file mode 100644 index 252b62c5..00000000 --- a/vignettes/module_skeleton/SettingsFunctions.R +++ /dev/null @@ -1,18 +0,0 @@ -createCohortGeneratorModuleSpecifications <- function(incremental = TRUE, - generateStats = TRUE) { - analysis <- list() - for (name in names(formals(createCohortGeneratorModuleSpecifications))) { - analysis[[name]] <- get(name) - } - - checkmate::assert_file_exists("MetaData.json") - moduleInfo <- ParallelLogger::loadSettingsFromJson("MetaData.json") - - specifications <- list(module = moduleInfo$Name, - version = moduleInfo$Version, - remoteRepo = "github.com", - remoteUsername = "ohdsi", - settings = analysis) - class(specifications) <- c("CohortGeneratorModuleSpecifications", "ModuleSpecifications") - return(specifications) -} diff --git a/vignettes/module_skeleton/YourProjectModule.Rproj b/vignettes/module_skeleton/YourProjectModule.Rproj deleted file mode 100644 index eaa6b818..00000000 --- 
a/vignettes/module_skeleton/YourProjectModule.Rproj +++ /dev/null @@ -1,18 +0,0 @@ -Version: 1.0 - -RestoreWorkspace: Default -SaveWorkspace: Default -AlwaysSaveHistory: Default - -EnableCodeIndexing: Yes -UseSpacesForTab: Yes -NumSpacesForTab: 2 -Encoding: UTF-8 - -RnwWeave: Sweave -LaTeX: pdfLaTeX - -BuildType: Package -PackageUseDevtools: Yes -PackageInstallArgs: --no-multiarch --with-keep.source -PackageRoxygenize: rd,collate,namespace diff --git a/vignettes/module_skeleton/extras/CreateJobContextForTesting.R b/vignettes/module_skeleton/extras/CreateJobContextForTesting.R deleted file mode 100644 index 1ad1dac5..00000000 --- a/vignettes/module_skeleton/extras/CreateJobContextForTesting.R +++ /dev/null @@ -1,143 +0,0 @@ -# Create a job context for testing purposes -library(Strategus) -library(dplyr) -source("SettingsFunctions.R") - -# Generic Helpers ---------------------------- -getModuleInfo <- function() { - checkmate::assert_file_exists("MetaData.json") - return(ParallelLogger::loadSettingsFromJson("MetaData.json")) -} - -# Sample Data Helpers ---------------------------- -getSampleCohortDefintionSet <- function() { - sampleCohorts <- CohortGenerator::createEmptyCohortDefinitionSet() - cohortJsonFiles <- list.files(path = system.file("testdata/name/cohorts", package = "CohortGenerator"), full.names = TRUE) - for (i in 1:length(cohortJsonFiles)) { - cohortJsonFileName <- cohortJsonFiles[i] - cohortName <- tools::file_path_sans_ext(basename(cohortJsonFileName)) - cohortJson <- readChar(cohortJsonFileName, file.info(cohortJsonFileName)$size) - sampleCohorts <- rbind(sampleCohorts, data.frame( - cohortId = as.double(i), - cohortName = cohortName, - json = cohortJson, - sql = "", - stringsAsFactors = FALSE - )) - } - - # Add subsets to the cohort definition set - maleOnlySubsetOperators <- list( - CohortGenerator::createDemographicSubset( - name = "Gender == Male", - gender = 8507 - ) - ) - maleOnlySubsetDef <- CohortGenerator::createCohortSubsetDefinition( - 
name = "Males", - definitionId = 1, - subsetOperators = maleOnlySubsetOperators - ) - # Define a subset for males age 40+ - maleAgeBoundedSubsetOperators <- list( - CohortGenerator::createDemographicSubset( - name = "Gender == Male, Age 40+", - gender = 8507, - ageMin = 40 - ) - ) - maleAgeBoundedSubsetDef <- CohortGenerator::createCohortSubsetDefinition( - name = "Male, Age 40+", - definitionId = 2, - subsetOperators = maleAgeBoundedSubsetOperators - ) - - subsetDef1 <- CohortGenerator::createCohortSubsetDefinition( - name = "Celecoxib new users, male >= 18, any exposure to celecoxib", - definitionId = 3, - subsetOperators = list( - CohortGenerator::createCohortSubset( - name = "Restrict to those with prior celecoxib", - cohortIds = 1, - negate = FALSE, - cohortCombinationOperator = "all", - startWindow = CohortGenerator::createSubsetCohortWindow(-99999, 99999, "cohortStart"), - endWindow = CohortGenerator::createSubsetCohortWindow(-99999, 99999, "cohortStart") - ), - CohortGenerator::createLimitSubset( - name = "Earlist event", - priorTime = 365, - followUpTime = 1, - limitTo = "firstEver" - ), - CohortGenerator::createDemographicSubset( - name = "Male and age 18+", - ageMin = 18, - gender = 8507 - ) - ) - ) - - - sampleCohorts <- sampleCohorts %>% - CohortGenerator::addCohortSubsetDefinition(maleOnlySubsetDef) %>% - CohortGenerator::addCohortSubsetDefinition(maleAgeBoundedSubsetDef) %>% - CohortGenerator::addCohortSubsetDefinition(subsetDef1) - return(sampleCohorts) -} - -createCohortSharedResource <- function(cohortDefinitionSet = getSampleCohortDefintionSet()) { - sharedResource <- createCohortSharedResourceSpecifications(cohortDefinitionSet = cohortDefinitionSet) - return(sharedResource) -} - -createNegativeControlSharedResource <- function() { - negativeControlOutcomes <- readCsv(file = system.file("testdata/negativecontrols/negativecontrolOutcomes.csv", - package = "CohortGenerator", - mustWork = TRUE - )) - negativeControlOutcomes$cohortId <- 
negativeControlOutcomes$outcomeConceptId - createNegativeControlOutcomeCohortSharedResourceSpecifications( - negativeControlOutcomeCohortSet = negativeControlOutcomes, - occurrenceType = "all", - detectOnDescendants = FALSE - ) -} - -# Create CohortGeneratorModule settings --------------------------------------- -cohortGeneratorModuleSpecifications <- createCohortGeneratorModuleSpecifications( - incremental = FALSE, - generateStats = TRUE -) - -# Module Settings Spec ---------------------------- -analysisSpecifications <- createEmptyAnalysisSpecificiations() %>% - addSharedResources(createCohortSharedResource()) %>% - addSharedResources(createNegativeControlSharedResource()) %>% - addModuleSpecifications(cohortGeneratorModuleSpecifications) - -# executionSettings <- Strategus::createExecutionSettings( -executionSettings <- Strategus::createCdmExecutionSettings( - connectionDetailsReference = "dummy", - workDatabaseSchema = "main", - cdmDatabaseSchema = "main", - cohortTableNames = CohortGenerator::getCohortTableNames(cohortTable = "cohort"), - workFolder = "dummy", - resultsFolder = "dummy", - minCellCount = 5 -) - -# Job Context ---------------------------- -module <- "CohortGeneratorModule" -moduleIndex <- 1 -moduleExecutionSettings <- executionSettings -moduleExecutionSettings$workSubFolder <- "dummy" -moduleExecutionSettings$resultsSubFolder <- "dummy" -moduleExecutionSettings$databaseId <- 123 -jobContext <- list( - sharedResources = analysisSpecifications$sharedResources, - settings = analysisSpecifications$moduleSpecifications[[moduleIndex]]$settings, - moduleExecutionSettings = moduleExecutionSettings -) -saveRDS(jobContext, "tests/testJobContext.rds") -# ParallelLogger::saveSettingsToJson(analysisSpecifications, fileName = "extras/analysisSettings.json") diff --git a/vignettes/module_skeleton/extras/ModuleMaintenance.R b/vignettes/module_skeleton/extras/ModuleMaintenance.R deleted file mode 100644 index e5ee2791..00000000 --- 
a/vignettes/module_skeleton/extras/ModuleMaintenance.R +++ /dev/null @@ -1,14 +0,0 @@ -# Format and check code: -styler::style_dir() -OhdsiRTools::updateCopyrightYearFolder() -OhdsiRTools::findNonAsciiStringsInFolder() -devtools::spell_check() - -# Generate renv lock file and activate renv: -OhdsiRTools::createRenvLockFile( - rootPackage = "CohortGenerator", - includeRootPackage = TRUE, - mode = "description", - additionalRequiredPackages = c("checkmate", "CirceR") -) -renv::init() diff --git a/vignettes/module_skeleton/renv.lock b/vignettes/module_skeleton/renv.lock deleted file mode 100644 index 41386505..00000000 --- a/vignettes/module_skeleton/renv.lock +++ /dev/null @@ -1,321 +0,0 @@ -{ - "R" : { - "Version" : "4.1.3", - "Repositories" : [ - { - "Name" : "CRAN", - "URL" : "https://cloud.r-project.org" - } - ] - }, - "Packages" : { - "glue" : { - "Package" : "glue", - "Version" : "1.6.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "cli" : { - "Package" : "cli", - "Version" : "3.3.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "rlang" : { - "Package" : "rlang", - "Version" : "1.0.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "crayon" : { - "Package" : "crayon", - "Version" : "1.5.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "ellipsis" : { - "Package" : "ellipsis", - "Version" : "0.3.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "fansi" : { - "Package" : "fansi", - "Version" : "1.0.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "lifecycle" : { - "Package" : "lifecycle", - "Version" : "1.0.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "magrittr" : { - "Package" : "magrittr", - "Version" : "2.0.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "pkgconfig" : { - "Package" : "pkgconfig", - "Version" : "2.0.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "utf8" : { - "Package" : "utf8", - "Version" : "1.2.2", - "Source" : 
"Repository", - "Repository" : "CRAN" - }, - "vctrs" : { - "Package" : "vctrs", - "Version" : "0.4.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "bit" : { - "Package" : "bit", - "Version" : "4.0.4", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "cpp11" : { - "Package" : "cpp11", - "Version" : "0.4.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "hms" : { - "Package" : "hms", - "Version" : "1.1.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "pillar" : { - "Package" : "pillar", - "Version" : "1.7.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "prettyunits" : { - "Package" : "prettyunits", - "Version" : "1.1.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "purrr" : { - "Package" : "purrr", - "Version" : "0.3.4", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "R6" : { - "Package" : "R6", - "Version" : "2.5.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "bit64" : { - "Package" : "bit64", - "Version" : "4.0.5", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "progress" : { - "Package" : "progress", - "Version" : "1.2.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "Rcpp" : { - "Package" : "Rcpp", - "Version" : "1.0.8.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "tibble" : { - "Package" : "tibble", - "Version" : "3.1.6", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "tidyselect" : { - "Package" : "tidyselect", - "Version" : "1.1.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "tzdb" : { - "Package" : "tzdb", - "Version" : "0.3.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "withr" : { - "Package" : "withr", - "Version" : "2.5.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "clipr" : { - "Package" : "clipr", - "Version" : "0.8.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "rJava" : { - "Package" : "rJava", - "Version" : "1.0-6", - "Source" : 
"Repository", - "Repository" : "CRAN" - }, - "stringi" : { - "Package" : "stringi", - "Version" : "1.7.6", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "triebeard" : { - "Package" : "triebeard", - "Version" : "0.3.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "vroom" : { - "Package" : "vroom", - "Version" : "1.5.7", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "backports" : { - "Package" : "backports", - "Version" : "1.4.1", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "DBI" : { - "Package" : "DBI", - "Version" : "1.1.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "generics" : { - "Package" : "generics", - "Version" : "0.1.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "jsonlite" : { - "Package" : "jsonlite", - "Version" : "1.8.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "readr" : { - "Package" : "readr", - "Version" : "2.1.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "snow" : { - "Package" : "snow", - "Version" : "0.4-4", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "SqlRender" : { - "Package" : "SqlRender", - "Version" : "1.9.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "stringr" : { - "Package" : "stringr", - "Version" : "1.4.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "urltools" : { - "Package" : "urltools", - "Version" : "1.7.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "xml2" : { - "Package" : "xml2", - "Version" : "1.3.3", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "checkmate" : { - "Package" : "checkmate", - "Version" : "2.1.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "DatabaseConnector" : { - "Package" : "DatabaseConnector", - "Version" : "5.0.2", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "digest" : { - "Package" : "digest", - "Version" : "0.6.29", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "dplyr" : { - 
"Package" : "dplyr", - "Version" : "1.0.8", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "lubridate" : { - "Package" : "lubridate", - "Version" : "1.8.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "ParallelLogger" : { - "Package" : "ParallelLogger", - "Version" : "3.0.0", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "RJSONIO" : { - "Package" : "RJSONIO", - "Version" : "1.3-1.6", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "renv" : { - "Package" : "renv", - "Version" : "0.15.4", - "Source" : "Repository", - "Repository" : "CRAN" - }, - "CirceR" : { - "Package" : "CirceR", - "Version" : "1.1.1", - "Source" : "GitHub", - "RemoteType" : "github", - "RemoteHost" : "api.github.com", - "RemoteRepo" : "CirceR", - "RemoteUsername" : "ohdsi", - "RemoteRef" : "v1.1.1" - }, - "CohortGenerator" : { - "Package" : "CohortGenerator", - "Version" : "0.5.0", - "Source" : "GitHub", - "RemoteType" : "github", - "RemoteHost" : "api.github.com", - "RemoteRepo" : "CohortGenerator", - "RemoteUsername" : "ohdsi", - "RemoteRef" : "develop" - } - } -} diff --git a/vignettes/module_skeleton/renv/.gitignore b/vignettes/module_skeleton/renv/.gitignore deleted file mode 100644 index 22a0d01d..00000000 --- a/vignettes/module_skeleton/renv/.gitignore +++ /dev/null @@ -1,7 +0,0 @@ -sandbox/ -library/ -local/ -cellar/ -lock/ -python/ -staging/ diff --git a/vignettes/module_skeleton/renv/activate.R b/vignettes/module_skeleton/renv/activate.R deleted file mode 100644 index 2969c732..00000000 --- a/vignettes/module_skeleton/renv/activate.R +++ /dev/null @@ -1,1201 +0,0 @@ - -local({ - - # the requested version of renv - version <- "1.0.2" - attr(version, "sha") <- NULL - - # the project directory - project <- getwd() - - # use start-up diagnostics if enabled - diagnostics <- Sys.getenv("RENV_STARTUP_DIAGNOSTICS", unset = "FALSE") - if (diagnostics) { - start <- Sys.time() - profile <- tempfile("renv-startup-", fileext = ".Rprof") - 
utils::Rprof(profile) - on.exit({ - utils::Rprof(NULL) - elapsed <- signif(difftime(Sys.time(), start, units = "auto"), digits = 2L) - writeLines(sprintf("- renv took %s to run the autoloader.", format(elapsed))) - writeLines(sprintf("- Profile: %s", profile)) - print(utils::summaryRprof(profile)) - }, add = TRUE) - } - - # figure out whether the autoloader is enabled - enabled <- local({ - - # first, check config option - override <- getOption("renv.config.autoloader.enabled") - if (!is.null(override)) - return(override) - - # next, check environment variables - # TODO: prefer using the configuration one in the future - envvars <- c( - "RENV_CONFIG_AUTOLOADER_ENABLED", - "RENV_AUTOLOADER_ENABLED", - "RENV_ACTIVATE_PROJECT" - ) - - for (envvar in envvars) { - envval <- Sys.getenv(envvar, unset = NA) - if (!is.na(envval)) - return(tolower(envval) %in% c("true", "t", "1")) - } - - # enable by default - TRUE - - }) - - if (!enabled) - return(FALSE) - - # avoid recursion - if (identical(getOption("renv.autoloader.running"), TRUE)) { - warning("ignoring recursive attempt to run renv autoloader") - return(invisible(TRUE)) - } - - # signal that we're loading renv during R startup - options(renv.autoloader.running = TRUE) - on.exit(options(renv.autoloader.running = NULL), add = TRUE) - - # signal that we've consented to use renv - options(renv.consent = TRUE) - - # load the 'utils' package eagerly -- this ensures that renv shims, which - # mask 'utils' packages, will come first on the search path - library(utils, lib.loc = .Library) - - # unload renv if it's already been loaded - if ("renv" %in% loadedNamespaces()) - unloadNamespace("renv") - - # load bootstrap tools - `%||%` <- function(x, y) { - if (is.null(x)) y else x - } - - catf <- function(fmt, ..., appendLF = TRUE) { - - quiet <- getOption("renv.bootstrap.quiet", default = FALSE) - if (quiet) - return(invisible()) - - msg <- sprintf(fmt, ...) 
- cat(msg, file = stdout(), sep = if (appendLF) "\n" else "") - - invisible(msg) - - } - - header <- function(label, - ..., - prefix = "#", - suffix = "-", - n = min(getOption("width"), 78)) - { - label <- sprintf(label, ...) - n <- max(n - nchar(label) - nchar(prefix) - 2L, 8L) - if (n <= 0) - return(paste(prefix, label)) - - tail <- paste(rep.int(suffix, n), collapse = "") - paste0(prefix, " ", label, " ", tail) - - } - - startswith <- function(string, prefix) { - substring(string, 1, nchar(prefix)) == prefix - } - - bootstrap <- function(version, library) { - - friendly <- renv_bootstrap_version_friendly(version) - section <- header(sprintf("Bootstrapping renv %s", friendly)) - catf(section) - - # attempt to download renv - catf("- Downloading renv ... ", appendLF = FALSE) - withCallingHandlers( - tarball <- renv_bootstrap_download(version), - error = function(err) { - catf("FAILED") - stop("failed to download:\n", conditionMessage(err)) - } - ) - catf("OK") - on.exit(unlink(tarball), add = TRUE) - - # now attempt to install - catf("- Installing renv ... 
", appendLF = FALSE) - withCallingHandlers( - status <- renv_bootstrap_install(version, tarball, library), - error = function(err) { - catf("FAILED") - stop("failed to install:\n", conditionMessage(err)) - } - ) - catf("OK") - - # add empty line to break up bootstrapping from normal output - catf("") - - return(invisible()) - } - - renv_bootstrap_tests_running <- function() { - getOption("renv.tests.running", default = FALSE) - } - - renv_bootstrap_repos <- function() { - - # get CRAN repository - cran <- getOption("renv.repos.cran", "https://cloud.r-project.org") - - # check for repos override - repos <- Sys.getenv("RENV_CONFIG_REPOS_OVERRIDE", unset = NA) - if (!is.na(repos)) { - - # check for RSPM; if set, use a fallback repository for renv - rspm <- Sys.getenv("RSPM", unset = NA) - if (identical(rspm, repos)) - repos <- c(RSPM = rspm, CRAN = cran) - - return(repos) - - } - - # check for lockfile repositories - repos <- tryCatch(renv_bootstrap_repos_lockfile(), error = identity) - if (!inherits(repos, "error") && length(repos)) - return(repos) - - # retrieve current repos - repos <- getOption("repos") - - # ensure @CRAN@ entries are resolved - repos[repos == "@CRAN@"] <- cran - - # add in renv.bootstrap.repos if set - default <- c(FALLBACK = "https://cloud.r-project.org") - extra <- getOption("renv.bootstrap.repos", default = default) - repos <- c(repos, extra) - - # remove duplicates that might've snuck in - dupes <- duplicated(repos) | duplicated(names(repos)) - repos[!dupes] - - } - - renv_bootstrap_repos_lockfile <- function() { - - lockpath <- Sys.getenv("RENV_PATHS_LOCKFILE", unset = "renv.lock") - if (!file.exists(lockpath)) - return(NULL) - - lockfile <- tryCatch(renv_json_read(lockpath), error = identity) - if (inherits(lockfile, "error")) { - warning(lockfile) - return(NULL) - } - - repos <- lockfile$R$Repositories - if (length(repos) == 0) - return(NULL) - - keys <- vapply(repos, `[[`, "Name", FUN.VALUE = character(1)) - vals <- vapply(repos, `[[`, 
"URL", FUN.VALUE = character(1)) - names(vals) <- keys - - return(vals) - - } - - renv_bootstrap_download <- function(version) { - - sha <- attr(version, "sha", exact = TRUE) - - methods <- if (!is.null(sha)) { - - # attempting to bootstrap a development version of renv - c( - function() renv_bootstrap_download_tarball(sha), - function() renv_bootstrap_download_github(sha) - ) - - } else { - - # attempting to bootstrap a release version of renv - c( - function() renv_bootstrap_download_tarball(version), - function() renv_bootstrap_download_cran_latest(version), - function() renv_bootstrap_download_cran_archive(version) - ) - - } - - for (method in methods) { - path <- tryCatch(method(), error = identity) - if (is.character(path) && file.exists(path)) - return(path) - } - - stop("All download methods failed") - - } - - renv_bootstrap_download_impl <- function(url, destfile) { - - mode <- "wb" - - # https://bugs.r-project.org/bugzilla/show_bug.cgi?id=17715 - fixup <- - Sys.info()[["sysname"]] == "Windows" && - substring(url, 1L, 5L) == "file:" - - if (fixup) - mode <- "w+b" - - args <- list( - url = url, - destfile = destfile, - mode = mode, - quiet = TRUE - ) - - if ("headers" %in% names(formals(utils::download.file))) - args$headers <- renv_bootstrap_download_custom_headers(url) - - do.call(utils::download.file, args) - - } - - renv_bootstrap_download_custom_headers <- function(url) { - - headers <- getOption("renv.download.headers") - if (is.null(headers)) - return(character()) - - if (!is.function(headers)) - stopf("'renv.download.headers' is not a function") - - headers <- headers(url) - if (length(headers) == 0L) - return(character()) - - if (is.list(headers)) - headers <- unlist(headers, recursive = FALSE, use.names = TRUE) - - ok <- - is.character(headers) && - is.character(names(headers)) && - all(nzchar(names(headers))) - - if (!ok) - stop("invocation of 'renv.download.headers' did not return a named character vector") - - headers - - } - - 
renv_bootstrap_download_cran_latest <- function(version) { - - spec <- renv_bootstrap_download_cran_latest_find(version) - type <- spec$type - repos <- spec$repos - - baseurl <- utils::contrib.url(repos = repos, type = type) - ext <- if (identical(type, "source")) - ".tar.gz" - else if (Sys.info()[["sysname"]] == "Windows") - ".zip" - else - ".tgz" - name <- sprintf("renv_%s%s", version, ext) - url <- paste(baseurl, name, sep = "/") - - destfile <- file.path(tempdir(), name) - status <- tryCatch( - renv_bootstrap_download_impl(url, destfile), - condition = identity - ) - - if (inherits(status, "condition")) - return(FALSE) - - # report success and return - destfile - - } - - renv_bootstrap_download_cran_latest_find <- function(version) { - - # check whether binaries are supported on this system - binary <- - getOption("renv.bootstrap.binary", default = TRUE) && - !identical(.Platform$pkgType, "source") && - !identical(getOption("pkgType"), "source") && - Sys.info()[["sysname"]] %in% c("Darwin", "Windows") - - types <- c(if (binary) "binary", "source") - - # iterate over types + repositories - for (type in types) { - for (repos in renv_bootstrap_repos()) { - - # retrieve package database - db <- tryCatch( - as.data.frame( - utils::available.packages(type = type, repos = repos), - stringsAsFactors = FALSE - ), - error = identity - ) - - if (inherits(db, "error")) - next - - # check for compatible entry - entry <- db[db$Package %in% "renv" & db$Version %in% version, ] - if (nrow(entry) == 0) - next - - # found it; return spec to caller - spec <- list(entry = entry, type = type, repos = repos) - return(spec) - - } - } - - # if we got here, we failed to find renv - fmt <- "renv %s is not available from your declared package repositories" - stop(sprintf(fmt, version)) - - } - - renv_bootstrap_download_cran_archive <- function(version) { - - name <- sprintf("renv_%s.tar.gz", version) - repos <- renv_bootstrap_repos() - urls <- file.path(repos, "src/contrib/Archive/renv", 
name) - destfile <- file.path(tempdir(), name) - - for (url in urls) { - - status <- tryCatch( - renv_bootstrap_download_impl(url, destfile), - condition = identity - ) - - if (identical(status, 0L)) - return(destfile) - - } - - return(FALSE) - - } - - renv_bootstrap_download_tarball <- function(version) { - - # if the user has provided the path to a tarball via - # an environment variable, then use it - tarball <- Sys.getenv("RENV_BOOTSTRAP_TARBALL", unset = NA) - if (is.na(tarball)) - return() - - # allow directories - if (dir.exists(tarball)) { - name <- sprintf("renv_%s.tar.gz", version) - tarball <- file.path(tarball, name) - } - - # bail if it doesn't exist - if (!file.exists(tarball)) { - - # let the user know we weren't able to honour their request - fmt <- "- RENV_BOOTSTRAP_TARBALL is set (%s) but does not exist." - msg <- sprintf(fmt, tarball) - warning(msg) - - # bail - return() - - } - - catf("- Using local tarball '%s'.", tarball) - tarball - - } - - renv_bootstrap_download_github <- function(version) { - - enabled <- Sys.getenv("RENV_BOOTSTRAP_FROM_GITHUB", unset = "TRUE") - if (!identical(enabled, "TRUE")) - return(FALSE) - - # prepare download options - pat <- Sys.getenv("GITHUB_PAT") - if (nzchar(Sys.which("curl")) && nzchar(pat)) { - fmt <- "--location --fail --header \"Authorization: token %s\"" - extra <- sprintf(fmt, pat) - saved <- options("download.file.method", "download.file.extra") - options(download.file.method = "curl", download.file.extra = extra) - on.exit(do.call(base::options, saved), add = TRUE) - } else if (nzchar(Sys.which("wget")) && nzchar(pat)) { - fmt <- "--header=\"Authorization: token %s\"" - extra <- sprintf(fmt, pat) - saved <- options("download.file.method", "download.file.extra") - options(download.file.method = "wget", download.file.extra = extra) - on.exit(do.call(base::options, saved), add = TRUE) - } - - url <- file.path("https://api.github.com/repos/rstudio/renv/tarball", version) - name <- sprintf("renv_%s.tar.gz", 
version) - destfile <- file.path(tempdir(), name) - - status <- tryCatch( - renv_bootstrap_download_impl(url, destfile), - condition = identity - ) - - if (!identical(status, 0L)) - return(FALSE) - - renv_bootstrap_download_augment(destfile) - - return(destfile) - - } - - # Add Sha to DESCRIPTION. This is stop gap until #890, after which we - # can use renv::install() to fully capture metadata. - renv_bootstrap_download_augment <- function(destfile) { - sha <- renv_bootstrap_git_extract_sha1_tar(destfile) - if (is.null(sha)) { - return() - } - - # Untar - tempdir <- tempfile("renv-github-") - on.exit(unlink(tempdir, recursive = TRUE), add = TRUE) - untar(destfile, exdir = tempdir) - pkgdir <- dir(tempdir, full.names = TRUE)[[1]] - - # Modify description - desc_path <- file.path(pkgdir, "DESCRIPTION") - desc_lines <- readLines(desc_path) - remotes_fields <- c( - "RemoteType: github", - "RemoteHost: api.github.com", - "RemoteRepo: renv", - "RemoteUsername: rstudio", - "RemotePkgRef: rstudio/renv", - paste("RemoteRef: ", sha), - paste("RemoteSha: ", sha) - ) - writeLines(c(desc_lines[desc_lines != ""], remotes_fields), con = desc_path) - - # Re-tar - local({ - old <- setwd(tempdir) - on.exit(setwd(old), add = TRUE) - - tar(destfile, compression = "gzip") - }) - invisible() - } - - # Extract the commit hash from a git archive. Git archives include the SHA1 - # hash as the comment field of the tarball pax extended header - # (see https://www.kernel.org/pub/software/scm/git/docs/git-archive.html) - # For GitHub archives this should be the first header after the default one - # (512 byte) header. 
- renv_bootstrap_git_extract_sha1_tar <- function(bundle) { - - # open the bundle for reading - # We use gzcon for everything because (from ?gzcon) - # > Reading from a connection which does not supply a 'gzip' magic - # > header is equivalent to reading from the original connection - conn <- gzcon(file(bundle, open = "rb", raw = TRUE)) - on.exit(close(conn)) - - # The default pax header is 512 bytes long and the first pax extended header - # with the comment should be 51 bytes long - # `52 comment=` (11 chars) + 40 byte SHA1 hash - len <- 0x200 + 0x33 - res <- rawToChar(readBin(conn, "raw", n = len)[0x201:len]) - - if (grepl("^52 comment=", res)) { - sub("52 comment=", "", res) - } else { - NULL - } - } - - renv_bootstrap_install <- function(version, tarball, library) { - - # attempt to install it into project library - dir.create(library, showWarnings = FALSE, recursive = TRUE) - output <- renv_bootstrap_install_impl(library, tarball) - - # check for successful install - status <- attr(output, "status") - if (is.null(status) || identical(status, 0L)) - return(status) - - # an error occurred; report it - header <- "installation of renv failed" - lines <- paste(rep.int("=", nchar(header)), collapse = "") - text <- paste(c(header, lines, output), collapse = "\n") - stop(text) - - } - - renv_bootstrap_install_impl <- function(library, tarball) { - - # invoke using system2 so we can capture and report output - bin <- R.home("bin") - exe <- if (Sys.info()[["sysname"]] == "Windows") "R.exe" else "R" - R <- file.path(bin, exe) - - args <- c( - "--vanilla", "CMD", "INSTALL", "--no-multiarch", - "-l", shQuote(path.expand(library)), - shQuote(path.expand(tarball)) - ) - - system2(R, args, stdout = TRUE, stderr = TRUE) - - } - - renv_bootstrap_platform_prefix <- function() { - - # construct version prefix - version <- paste(R.version$major, R.version$minor, sep = ".") - prefix <- paste("R", numeric_version(version)[1, 1:2], sep = "-") - - # include SVN revision for 
development versions of R - # (to avoid sharing platform-specific artefacts with released versions of R) - devel <- - identical(R.version[["status"]], "Under development (unstable)") || - identical(R.version[["nickname"]], "Unsuffered Consequences") - - if (devel) - prefix <- paste(prefix, R.version[["svn rev"]], sep = "-r") - - # build list of path components - components <- c(prefix, R.version$platform) - - # include prefix if provided by user - prefix <- renv_bootstrap_platform_prefix_impl() - if (!is.na(prefix) && nzchar(prefix)) - components <- c(prefix, components) - - # build prefix - paste(components, collapse = "/") - - } - - renv_bootstrap_platform_prefix_impl <- function() { - - # if an explicit prefix has been supplied, use it - prefix <- Sys.getenv("RENV_PATHS_PREFIX", unset = NA) - if (!is.na(prefix)) - return(prefix) - - # if the user has requested an automatic prefix, generate it - auto <- Sys.getenv("RENV_PATHS_PREFIX_AUTO", unset = NA) - if (auto %in% c("TRUE", "True", "true", "1")) - return(renv_bootstrap_platform_prefix_auto()) - - # empty string on failure - "" - - } - - renv_bootstrap_platform_prefix_auto <- function() { - - prefix <- tryCatch(renv_bootstrap_platform_os(), error = identity) - if (inherits(prefix, "error") || prefix %in% "unknown") { - - msg <- paste( - "failed to infer current operating system", - "please file a bug report at https://github.com/rstudio/renv/issues", - sep = "; " - ) - - warning(msg) - - } - - prefix - - } - - renv_bootstrap_platform_os <- function() { - - sysinfo <- Sys.info() - sysname <- sysinfo[["sysname"]] - - # handle Windows + macOS up front - if (sysname == "Windows") - return("windows") - else if (sysname == "Darwin") - return("macos") - - # check for os-release files - for (file in c("/etc/os-release", "/usr/lib/os-release")) - if (file.exists(file)) - return(renv_bootstrap_platform_os_via_os_release(file, sysinfo)) - - # check for redhat-release files - if (file.exists("/etc/redhat-release")) - 
return(renv_bootstrap_platform_os_via_redhat_release()) - - "unknown" - - } - - renv_bootstrap_platform_os_via_os_release <- function(file, sysinfo) { - - # read /etc/os-release - release <- utils::read.table( - file = file, - sep = "=", - quote = c("\"", "'"), - col.names = c("Key", "Value"), - comment.char = "#", - stringsAsFactors = FALSE - ) - - vars <- as.list(release$Value) - names(vars) <- release$Key - - # get os name - os <- tolower(sysinfo[["sysname"]]) - - # read id - id <- "unknown" - for (field in c("ID", "ID_LIKE")) { - if (field %in% names(vars) && nzchar(vars[[field]])) { - id <- vars[[field]] - break - } - } - - # read version - version <- "unknown" - for (field in c("UBUNTU_CODENAME", "VERSION_CODENAME", "VERSION_ID", "BUILD_ID")) { - if (field %in% names(vars) && nzchar(vars[[field]])) { - version <- vars[[field]] - break - } - } - - # join together - paste(c(os, id, version), collapse = "-") - - } - - renv_bootstrap_platform_os_via_redhat_release <- function() { - - # read /etc/redhat-release - contents <- readLines("/etc/redhat-release", warn = FALSE) - - # infer id - id <- if (grepl("centos", contents, ignore.case = TRUE)) - "centos" - else if (grepl("redhat", contents, ignore.case = TRUE)) - "redhat" - else - "unknown" - - # try to find a version component (very hacky) - version <- "unknown" - - parts <- strsplit(contents, "[[:space:]]")[[1L]] - for (part in parts) { - - nv <- tryCatch(numeric_version(part), error = identity) - if (inherits(nv, "error")) - next - - version <- nv[1, 1] - break - - } - - paste(c("linux", id, version), collapse = "-") - - } - - renv_bootstrap_library_root_name <- function(project) { - - # use project name as-is if requested - asis <- Sys.getenv("RENV_PATHS_LIBRARY_ROOT_ASIS", unset = "FALSE") - if (asis) - return(basename(project)) - - # otherwise, disambiguate based on project's path - id <- substring(renv_bootstrap_hash_text(project), 1L, 8L) - paste(basename(project), id, sep = "-") - - } - - 
renv_bootstrap_library_root <- function(project) { - - prefix <- renv_bootstrap_profile_prefix() - - path <- Sys.getenv("RENV_PATHS_LIBRARY", unset = NA) - if (!is.na(path)) - return(paste(c(path, prefix), collapse = "/")) - - path <- renv_bootstrap_library_root_impl(project) - if (!is.null(path)) { - name <- renv_bootstrap_library_root_name(project) - return(paste(c(path, prefix, name), collapse = "/")) - } - - renv_bootstrap_paths_renv("library", project = project) - - } - - renv_bootstrap_library_root_impl <- function(project) { - - root <- Sys.getenv("RENV_PATHS_LIBRARY_ROOT", unset = NA) - if (!is.na(root)) - return(root) - - type <- renv_bootstrap_project_type(project) - if (identical(type, "package")) { - userdir <- renv_bootstrap_user_dir() - return(file.path(userdir, "library")) - } - - } - - renv_bootstrap_validate_version <- function(version, description = NULL) { - - # resolve description file - # - # avoid passing lib.loc to `packageDescription()` below, since R will - # use the loaded version of the package by default anyhow. 
note that - # this function should only be called after 'renv' is loaded - # https://github.com/rstudio/renv/issues/1625 - description <- description %||% packageDescription("renv") - - # check whether requested version 'version' matches loaded version of renv - sha <- attr(version, "sha", exact = TRUE) - valid <- if (!is.null(sha)) - renv_bootstrap_validate_version_dev(sha, description) - else - renv_bootstrap_validate_version_release(version, description) - - if (valid) - return(TRUE) - - # the loaded version of renv doesn't match the requested version; - # give the user instructions on how to proceed - remote <- if (!is.null(description[["RemoteSha"]])) { - paste("rstudio/renv", description[["RemoteSha"]], sep = "@") - } else { - paste("renv", description[["Version"]], sep = "@") - } - - # display both loaded version + sha if available - friendly <- renv_bootstrap_version_friendly( - version = description[["Version"]], - sha = description[["RemoteSha"]] - ) - - fmt <- paste( - "renv %1$s was loaded from project library, but this project is configured to use renv %2$s.", - "- Use `renv::record(\"%3$s\")` to record renv %1$s in the lockfile.", - "- Use `renv::restore(packages = \"renv\")` to install renv %2$s into the project library.", - sep = "\n" - ) - catf(fmt, friendly, renv_bootstrap_version_friendly(version), remote) - - FALSE - - } - - renv_bootstrap_validate_version_dev <- function(version, description) { - expected <- description[["RemoteSha"]] - is.character(expected) && startswith(expected, version) - } - - renv_bootstrap_validate_version_release <- function(version, description) { - expected <- description[["Version"]] - is.character(expected) && identical(expected, version) - } - - renv_bootstrap_hash_text <- function(text) { - - hashfile <- tempfile("renv-hash-") - on.exit(unlink(hashfile), add = TRUE) - - writeLines(text, con = hashfile) - tools::md5sum(hashfile) - - } - - renv_bootstrap_load <- function(project, libpath, version) { - - # try to 
load renv from the project library - if (!requireNamespace("renv", lib.loc = libpath, quietly = TRUE)) - return(FALSE) - - # warn if the version of renv loaded does not match - renv_bootstrap_validate_version(version) - - # execute renv load hooks, if any - hooks <- getHook("renv::autoload") - for (hook in hooks) - if (is.function(hook)) - tryCatch(hook(), error = warnify) - - # load the project - renv::load(project) - - TRUE - - } - - renv_bootstrap_profile_load <- function(project) { - - # if RENV_PROFILE is already set, just use that - profile <- Sys.getenv("RENV_PROFILE", unset = NA) - if (!is.na(profile) && nzchar(profile)) - return(profile) - - # check for a profile file (nothing to do if it doesn't exist) - path <- renv_bootstrap_paths_renv("profile", profile = FALSE, project = project) - if (!file.exists(path)) - return(NULL) - - # read the profile, and set it if it exists - contents <- readLines(path, warn = FALSE) - if (length(contents) == 0L) - return(NULL) - - # set RENV_PROFILE - profile <- contents[[1L]] - if (!profile %in% c("", "default")) - Sys.setenv(RENV_PROFILE = profile) - - profile - - } - - renv_bootstrap_profile_prefix <- function() { - profile <- renv_bootstrap_profile_get() - if (!is.null(profile)) - return(file.path("profiles", profile, "renv")) - } - - renv_bootstrap_profile_get <- function() { - profile <- Sys.getenv("RENV_PROFILE", unset = "") - renv_bootstrap_profile_normalize(profile) - } - - renv_bootstrap_profile_set <- function(profile) { - profile <- renv_bootstrap_profile_normalize(profile) - if (is.null(profile)) - Sys.unsetenv("RENV_PROFILE") - else - Sys.setenv(RENV_PROFILE = profile) - } - - renv_bootstrap_profile_normalize <- function(profile) { - - if (is.null(profile) || profile %in% c("", "default")) - return(NULL) - - profile - - } - - renv_bootstrap_path_absolute <- function(path) { - - substr(path, 1L, 1L) %in% c("~", "/", "\\") || ( - substr(path, 1L, 1L) %in% c(letters, LETTERS) && - substr(path, 2L, 3L) %in% 
c(":/", ":\\") - ) - - } - - renv_bootstrap_paths_renv <- function(..., profile = TRUE, project = NULL) { - renv <- Sys.getenv("RENV_PATHS_RENV", unset = "renv") - root <- if (renv_bootstrap_path_absolute(renv)) NULL else project - prefix <- if (profile) renv_bootstrap_profile_prefix() - components <- c(root, renv, prefix, ...) - paste(components, collapse = "/") - } - - renv_bootstrap_project_type <- function(path) { - - descpath <- file.path(path, "DESCRIPTION") - if (!file.exists(descpath)) - return("unknown") - - desc <- tryCatch( - read.dcf(descpath, all = TRUE), - error = identity - ) - - if (inherits(desc, "error")) - return("unknown") - - type <- desc$Type - if (!is.null(type)) - return(tolower(type)) - - package <- desc$Package - if (!is.null(package)) - return("package") - - "unknown" - - } - - renv_bootstrap_user_dir <- function() { - dir <- renv_bootstrap_user_dir_impl() - path.expand(chartr("\\", "/", dir)) - } - - renv_bootstrap_user_dir_impl <- function() { - - # use local override if set - override <- getOption("renv.userdir.override") - if (!is.null(override)) - return(override) - - # use R_user_dir if available - tools <- asNamespace("tools") - if (is.function(tools$R_user_dir)) - return(tools$R_user_dir("renv", "cache")) - - # try using our own backfill for older versions of R - envvars <- c("R_USER_CACHE_DIR", "XDG_CACHE_HOME") - for (envvar in envvars) { - root <- Sys.getenv(envvar, unset = NA) - if (!is.na(root)) - return(file.path(root, "R/renv")) - } - - # use platform-specific default fallbacks - if (Sys.info()[["sysname"]] == "Windows") - file.path(Sys.getenv("LOCALAPPDATA"), "R/cache/R/renv") - else if (Sys.info()[["sysname"]] == "Darwin") - "~/Library/Caches/org.R-project.R/R/renv" - else - "~/.cache/R/renv" - - } - - renv_bootstrap_version_friendly <- function(version, shafmt = NULL, sha = NULL) { - sha <- sha %||% attr(version, "sha", exact = TRUE) - parts <- c(version, sprintf(shafmt %||% " [sha: %s]", substring(sha, 1L, 7L))) - 
paste(parts, collapse = "") - } - - renv_bootstrap_exec <- function(project, libpath, version) { - if (!renv_bootstrap_load(project, libpath, version)) - renv_bootstrap_run(version, libpath) - } - - renv_bootstrap_run <- function(version, libpath) { - - # perform bootstrap - bootstrap(version, libpath) - - # exit early if we're just testing bootstrap - if (!is.na(Sys.getenv("RENV_BOOTSTRAP_INSTALL_ONLY", unset = NA))) - return(TRUE) - - # try again to load - if (requireNamespace("renv", lib.loc = libpath, quietly = TRUE)) { - return(renv::load(project = getwd())) - } - - # failed to download or load renv; warn the user - msg <- c( - "Failed to find an renv installation: the project will not be loaded.", - "Use `renv::activate()` to re-initialize the project." - ) - - warning(paste(msg, collapse = "\n"), call. = FALSE) - - } - - - renv_bootstrap_in_rstudio <- function() { - commandArgs()[[1]] == "RStudio" - } - - # Used to work around buglet in RStudio if hook uses readline - renv_bootstrap_flush_console <- function() { - tryCatch({ - tools <- as.environment("tools:rstudio") - tools$.rs.api.sendToConsole("", echo = FALSE, focus = FALSE) - }, error = function(cnd) {}) - } - - renv_json_read <- function(file = NULL, text = NULL) { - - jlerr <- NULL - - # if jsonlite is loaded, use that instead - if ("jsonlite" %in% loadedNamespaces()) { - - json <- catch(renv_json_read_jsonlite(file, text)) - if (!inherits(json, "error")) - return(json) - - jlerr <- json - - } - - # otherwise, fall back to the default JSON reader - json <- catch(renv_json_read_default(file, text)) - if (!inherits(json, "error")) - return(json) - - # report an error - if (!is.null(jlerr)) - stop(jlerr) - else - stop(json) - - } - - renv_json_read_jsonlite <- function(file = NULL, text = NULL) { - text <- paste(text %||% read(file), collapse = "\n") - jsonlite::fromJSON(txt = text, simplifyVector = FALSE) - } - - renv_json_read_default <- function(file = NULL, text = NULL) { - - # find strings in the 
JSON - text <- paste(text %||% read(file), collapse = "\n") - pattern <- '["](?:(?:\\\\.)|(?:[^"\\\\]))*?["]' - locs <- gregexpr(pattern, text, perl = TRUE)[[1]] - - # if any are found, replace them with placeholders - replaced <- text - strings <- character() - replacements <- character() - - if (!identical(c(locs), -1L)) { - - # get the string values - starts <- locs - ends <- locs + attr(locs, "match.length") - 1L - strings <- substring(text, starts, ends) - - # only keep those requiring escaping - strings <- grep("[[\\]{}:]", strings, perl = TRUE, value = TRUE) - - # compute replacements - replacements <- sprintf('"\032%i\032"', seq_along(strings)) - - # replace the strings - mapply(function(string, replacement) { - replaced <<- sub(string, replacement, replaced, fixed = TRUE) - }, strings, replacements) - - } - - # transform the JSON into something the R parser understands - transformed <- replaced - transformed <- gsub("{}", "`names<-`(list(), character())", transformed, fixed = TRUE) - transformed <- gsub("[[{]", "list(", transformed, perl = TRUE) - transformed <- gsub("[]}]", ")", transformed, perl = TRUE) - transformed <- gsub(":", "=", transformed, fixed = TRUE) - text <- paste(transformed, collapse = "\n") - - # parse it - json <- parse(text = text, keep.source = FALSE, srcfile = NULL)[[1L]] - - # construct map between source strings, replaced strings - map <- as.character(parse(text = strings)) - names(map) <- as.character(parse(text = replacements)) - - # convert to list - map <- as.list(map) - - # remap strings in object - remapped <- renv_json_remap(json, map) - - # evaluate - eval(remapped, envir = baseenv()) - - } - - renv_json_remap <- function(json, map) { - - # fix names - if (!is.null(names(json))) { - lhs <- match(names(json), names(map), nomatch = 0L) - rhs <- match(names(map), names(json), nomatch = 0L) - names(json)[rhs] <- map[lhs] - } - - # fix values - if (is.character(json)) - return(map[[json]] %||% json) - - # handle true, false, null 
- if (is.name(json)) { - text <- as.character(json) - if (text == "true") - return(TRUE) - else if (text == "false") - return(FALSE) - else if (text == "null") - return(NULL) - } - - # recurse - if (is.recursive(json)) { - for (i in seq_along(json)) { - json[i] <- list(renv_json_remap(json[[i]], map)) - } - } - - json - - } - - # load the renv profile, if any - renv_bootstrap_profile_load(project) - - # construct path to library root - root <- renv_bootstrap_library_root(project) - - # construct library prefix for platform - prefix <- renv_bootstrap_platform_prefix() - - # construct full libpath - libpath <- file.path(root, prefix) - - if (renv_bootstrap_in_rstudio()) { - # RStudio only updates console once .Rprofile is finished, so - # instead run code on sessionInit - setHook("rstudio.sessionInit", function(...) { - renv_bootstrap_exec(project, libpath, version) - renv_bootstrap_flush_console() - }) - } else { - renv_bootstrap_exec(project, libpath, version) - } - - invisible() - -}) diff --git a/vignettes/module_skeleton/renv/settings.dcf b/vignettes/module_skeleton/renv/settings.dcf deleted file mode 100644 index 169d82f1..00000000 --- a/vignettes/module_skeleton/renv/settings.dcf +++ /dev/null @@ -1,10 +0,0 @@ -bioconductor.version: -external.libraries: -ignored.packages: -package.dependency.fields: Imports, Depends, LinkingTo -r.version: -snapshot.type: implicit -use.cache: TRUE -vcs.ignore.cellar: TRUE -vcs.ignore.library: TRUE -vcs.ignore.local: TRUE diff --git a/vignettes/module_skeleton/renv/settings.json b/vignettes/module_skeleton/renv/settings.json deleted file mode 100644 index ffdbb320..00000000 --- a/vignettes/module_skeleton/renv/settings.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "bioconductor.version": null, - "external.libraries": [], - "ignored.packages": [], - "package.dependency.fields": [ - "Imports", - "Depends", - "LinkingTo" - ], - "ppm.enabled": null, - "ppm.ignored.urls": [], - "r.version": null, - "snapshot.type": "implicit", - 
"use.cache": true, - "vcs.ignore.cellar": true, - "vcs.ignore.library": true, - "vcs.ignore.local": true, - "vcs.manage.ignores": true -} diff --git a/vignettes/module_skeleton/resources/exampleAnalysisSpecifications.json b/vignettes/module_skeleton/resources/exampleAnalysisSpecifications.json deleted file mode 100644 index a3111323..00000000 --- a/vignettes/module_skeleton/resources/exampleAnalysisSpecifications.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "sharedResources": [ - { - "cohortDefinitions": [ - { - "cohortId": "1", - "cohortName": "celecoxib", - "cohortDefinition": "...truncated..." - } - ], - "attr_class": ["CohortDefinitionSharedResources", "SharedResources"] - } - ], - "moduleSpecifications": [ - { - "module": "CohortGeneratorModule", - "version": "0.0.1", - "remoteRepo": "github.com", - "remoteUsername": "ohdsi", - "settings": { - "incremental": true, - "generateStats": true - }, - "attr_class": ["CohortGeneratorModuleSpecifications", "ModuleSpecifications"] - } - ], - "attr_class": "AnalysisSpecifications" -} diff --git a/vignettes/module_skeleton/resources/resultsDataModelSpecification.csv b/vignettes/module_skeleton/resources/resultsDataModelSpecification.csv deleted file mode 100644 index 7a739753..00000000 --- a/vignettes/module_skeleton/resources/resultsDataModelSpecification.csv +++ /dev/null @@ -1,8 +0,0 @@ -table_name,column_name,data_type,is_required,primary_key,empty_is_na -my_table,cohort_id,bigint,Yes,Yes,No -my_table,cohort_name,varchar,Yes,No,No -my_table,generation_status,varchar,No,No,No -my_table,start_time,Timestamp,No,No,No -my_table,end_time,Timestamp,No,No,No -my_table,database_id,varchar,Yes,Yes,No - diff --git a/vignettes/module_skeleton/tests/test-eunomia.R b/vignettes/module_skeleton/tests/test-eunomia.R deleted file mode 100644 index 743a3634..00000000 --- a/vignettes/module_skeleton/tests/test-eunomia.R +++ /dev/null @@ -1,41 +0,0 @@ -library(testthat) -library(Eunomia) -connectionDetails <- getEunomiaConnectionDetails() - 
-workFolder <- tempfile("work") -dir.create(workFolder) -resultsfolder <- tempfile("results") -dir.create(resultsfolder) -jobContext <- readRDS("tests/testJobContext.rds") -jobContext$moduleExecutionSettings$workSubFolder <- workFolder -jobContext$moduleExecutionSettings$resultsSubFolder <- resultsfolder -jobContext$moduleExecutionSettings$connectionDetails <- connectionDetails -jobContext$moduleExecutionSettings$resultsConnectionDetails <- connectionDetails -jobContext$moduleExecutionSettings$resultsDatabaseSchema <- jobContext$moduleExecutionSettings$workDatabaseSchema - -test_that("Test createDataModelSchema", { - source("Main.R") - createDataModelSchema(jobContext) - - # Verify that the table(s) are created - connection <- DatabaseConnector::connect( - connectionDetails = jobContext$moduleExecutionSettings$resultsConnectionDetails - ) - on.exit(DatabaseConnector::disconnect(connection)) - tableList <- DatabaseConnector::getTableNames( - connection = connection - ) - resultsTablesCreated <- tableList[grep(getModuleInfo()$TablePrefix, tableList)] - expect_true(length(resultsTablesCreated) > 0) -}) - -test_that("Run module", { - source("Main.R") - execute(jobContext) - resultsFiles <- list.files(resultsfolder) - expect_true("cg_cohort_definition.csv" %in% resultsFiles) -}) - -unlink(workFolder) -unlink(resultsfolder) -unlink(connectionDetails$server()) diff --git a/vignettes/module_skeleton/tests/testJobContext.rds b/vignettes/module_skeleton/tests/testJobContext.rds deleted file mode 100644 index 1d40f1fe..00000000 Binary files a/vignettes/module_skeleton/tests/testJobContext.rds and /dev/null differ diff --git a/vignettes/module_skeleton/tests/testScript.R b/vignettes/module_skeleton/tests/testScript.R deleted file mode 100644 index 02d8c189..00000000 --- a/vignettes/module_skeleton/tests/testScript.R +++ /dev/null @@ -1,10 +0,0 @@ -library(testthat) - -testFiles <- list.files(file.path("tests"), "test-.*\\.R", full.names = TRUE) -for (testFile in testFiles) { - 
message(sprintf("*** Running tests in '%s' ***", testFile)) - source(testFile) -} - - -# Note: testthat default structure does not work for non-packages: https://github.com/r-lib/testthat/issues/1490