Add tests for redshift. Clean up test setup file.

Adam Black 2021-08-19 15:17:01 -04:00
parent 05e9b801e0
commit ed5fb2d428
3 changed files with 97 additions and 70 deletions

View File

@@ -1,7 +1,7 @@
#' List CDM versions supported by this package
#'
#' @return A character vector containing the supported CDM versions in {major}.{minor} format.
#' @export
listSupportedVersions <- function() {
supportedVersions <- c("5.3", "5.4")
return(supportedVersions)
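# Usage sketch (illustrative, not part of this commit): the returned version strings drive the
# test loops below, so new CDM releases added here are picked up automatically.
# for (cdmVersion in listSupportedVersions()) {
#   message("CDM v", cdmVersion, " is supported")
# }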

View File

@@ -1,21 +1,9 @@
# Download the JDBC drivers used in the tests
# oldJarFolder <- Sys.getenv("DATABASECONNECTOR_JAR_FOLDER")
driverPath <- file.path(Sys.getenv("HOME"), "drivers")
if(!dir.exists(driverPath)) dir.create(driverPath)
Sys.setenv("DATABASECONNECTOR_JAR_FOLDER" = driverPath)
DatabaseConnector::downloadJdbcDrivers("postgresql", pathToDriver = driverPath)
print(Sys.getenv("DATABASECONNECTOR_JAR_FOLDER"))
print(list.files(driverPath))
# if(Sys.getenv("DATABASECONNECTOR_JAR_FOLDER") == "") {
# driverPath <- file.path(Sys.getenv("HOME"), "drivers")
# }
# downloadJdbcDrivers("sql server")
# downloadJdbcDrivers("oracle")
if(!Sys.getenv("LOCAL_TEST" == "TRUE")) {
DatabaseConnector::downloadJdbcDrivers("all", pathToDriver = driverPath)
}
# withr::defer({
# unlink(Sys.getenv("DATABASECONNECTOR_JAR_FOLDER"), recursive = TRUE, force = TRUE)
# Sys.setenv("DATABASECONNECTOR_JAR_FOLDER" = oldJarFolder)
# }, testthat::teardown_env())
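# Local-run sketch (assumption, not part of this commit): setting LOCAL_TEST before sourcing this
# setup skips the full driver download and keeps only the postgresql driver fetched above, e.g.
# Sys.setenv(LOCAL_TEST = "TRUE")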

View File

@@ -1,17 +1,9 @@
library(DatabaseConnector)
connectionDetails <- createConnectionDetails(
dbms = "postgresql",
user = Sys.getenv("CDMDDLBASE_POSTGRESQL_USER"),
password = Sys.getenv("CDMDDLBASE_POSTGRESQL_PASSWORD"),
server = Sys.getenv("CDMDDLBASE_POSTGRESQL_SERVER"),
pathToDriver = file.path(Sys.getenv("HOME"), "drivers")
)
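# Note (not in the original): this assumes the CDMDDLBASE_POSTGRESQL_* environment variables are set,
# e.g. in .Renviron or as CI secrets, and that the JDBC drivers were downloaded by the setup file.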
# Helper functions used in tests
.listTablesInSchema <- function(connectionDetails, schema) {
con <- DatabaseConnector::connect(connectionDetails)
tables <- DBI::dbListObjects(con, prefix = "cdmddlbase")
tables <- DBI::dbListObjects(con, prefix = schema)
DatabaseConnector::disconnect(con)
tables <- subset(tables, is_prefix == FALSE)
tables <- subset(tables, grepl("table", table))$table
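# Note (not in the original): dbListObjects() returns a data frame with an identifier column ("table")
# and an "is_prefix" flag; the subset() calls above drop schema prefixes and keep only table identifiers.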
@@ -23,62 +15,109 @@ connectionDetails <- createConnectionDetails(
tables <- .listTablesInSchema(connectionDetails, schema)
con <- DatabaseConnector::connect(connectionDetails)
for(table in tables) {
DBI::dbRemoveTable(con, name = DBI::SQL(paste(schema, table, sep = ".")))
}
DatabaseConnector::disconnect(con)
}
# .removeConstraintsPostgresql <- function(connectionDetails, schema) {
# # the order of removal of constraints matters!
# con <- DatabaseConnector::connect(connectionDetails)
# constraints <- DBI::dbGetQuery(con,
# "SELECT con.conname, rel.relname as relname
# FROM pg_catalog.pg_constraint con
# INNER JOIN pg_catalog.pg_class rel
# ON rel.oid = con.conrelid
# INNER JOIN pg_catalog.pg_namespace nsp
# ON nsp.oid = connamespace
# WHERE nsp.nspname = 'cdmddlbase';")
#
#
# constraints <- dplyr::mutate(constraints, sql = paste0("alter table ", schema, ".", relname, " drop constraint if exists ", conname, ";\n" ))
#
# sql <- paste(rev(constraints$sql), collapse = "")
# executeSql(con, sql)
#
# disconnect(con)
#
# }
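# (Note, not in the original: the rev() above reverses the generated ALTER statements, presumably so
# foreign key constraints are dropped before the primary/unique keys they reference.)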
test_that("Database can be connected to", {
expect_error(con <- connect(connectionDetails), NA)
disconnect(con)
})
test_that("Execute DDL on Postgres", {
# make sure the schema is cleared out
connectionDetails <- createConnectionDetails(
dbms = "postgresql",
user = Sys.getenv("CDMDDLBASE_POSTGRESQL_USER"),
password = Sys.getenv("CDMDDLBASE_POSTGRESQL_PASSWORD"),
server = Sys.getenv("CDMDDLBASE_POSTGRESQL_SERVER"),
pathToDriver = file.path(Sys.getenv("HOME"), "drivers")
)
cdmDatabaseSchema <- Sys.getenv("CDMDDLBASE_POSTGRESQL_SCHEMA")
cdmVersion <- "5.4"
# .removeConstraintsPostgresql(connectionDetails, cdmDatabaseSchema)
.dropAllTablesFromSchema(connectionDetails, cdmDatabaseSchema)
cat(paste("Connecting to schema", cdmDatabaseSchema, "\n"))
executeDdl(connectionDetails,
cdmVersion = cdmVersion,
cdmDatabaseSchema = cdmDatabaseSchema,
executeDdl = TRUE,
executePrimaryKey = FALSE,
executeForeignKey = FALSE)
tables <- .listTablesInSchema(connectionDetails, schema = cdmDatabaseSchema)
expect_error(con <- connect(connectionDetails), NA)
disconnect(con)
cdmTableCsvLoc <- system.file(file.path("csv", paste0("OMOP_CDMv", cdmVersion, "_Table_Level.csv")), package = "CommonDataModel", mustWork = TRUE)
tableSpecs <- read.csv(cdmTableCsvLoc, stringsAsFactors = FALSE)$cdmTableName
for(cdmVersion in listSupportedVersions()) {
# make sure the schema is cleared out
.dropAllTablesFromSchema(connectionDetails, cdmDatabaseSchema)
cat(paste("Connecting to schema", cdmDatabaseSchema, "\n"))
executeDdl(connectionDetails,
cdmVersion = cdmVersion,
cdmDatabaseSchema = cdmDatabaseSchema,
executeDdl = TRUE,
executePrimaryKey = TRUE,
executeForeignKey = FALSE)
# check that the tables in the database match the tables in the specification
expect_equal(sort(tables), sort(tableSpecs))
tables <- .listTablesInSchema(connectionDetails, schema = cdmDatabaseSchema)
# clear schema
# .removeConstraintsPostgresql(connectionDetails, cdmDatabaseSchema)
.dropAllTablesFromSchema(connectionDetails, cdmDatabaseSchema)
cdmTableCsvLoc <- system.file(file.path("csv", paste0("OMOP_CDMv", cdmVersion, "_Table_Level.csv")), package = "CommonDataModel", mustWork = TRUE)
tableSpecs <- read.csv(cdmTableCsvLoc, stringsAsFactors = FALSE)$cdmTableName
# check that the tables in the database match the tables in the specification
expect_equal(sort(tables), sort(tableSpecs))
# clear schema
.dropAllTablesFromSchema(connectionDetails, cdmDatabaseSchema)
}
})
test_that("Execute DDL on Redshift", {
connectionDetails <- createConnectionDetails(
dbms = "redshift",
user = Sys.getenv("CDMDDLBASE_REDSHIFT_USER"),
password = Sys.getenv("CDMDDLBASE_REDSHIFT_PASSWORD"),
server = Sys.getenv("CDMDDLBASE_REDSHIFT_SERVER"),
pathToDriver = file.path(Sys.getenv("HOME"), "drivers")
)
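# Note (not in the original): assumes the CDMDDLBASE_REDSHIFT_* environment variables are set and the
# redshift JDBC driver is available in the drivers folder populated by the setup file.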
expect_error(con <- connect(connectionDetails), NA)
disconnect(con)
cdmDatabaseSchema <- Sys.getenv("CDMDDLBASE_REDSHIFT_SCHEMA")
for(cdmVersion in listSupportedVersions()) {
# make sure the schema is cleared out
.dropAllTablesFromSchema(connectionDetails, cdmDatabaseSchema)
executeDdl(connectionDetails,
cdmVersion = cdmVersion,
cdmDatabaseSchema = cdmDatabaseSchema,
executeDdl = TRUE,
executePrimaryKey = TRUE,
executeForeignKey = FALSE)
tables <- .listTablesInSchema(connectionDetails, schema = cdmDatabaseSchema)
cdmTableCsvLoc <- system.file(file.path("csv", paste0("OMOP_CDMv", cdmVersion, "_Table_Level.csv")), package = "CommonDataModel", mustWork = TRUE)
tableSpecs <- read.csv(cdmTableCsvLoc, stringsAsFactors = FALSE)$cdmTableName
# check that the tables in the database match the tables in the specification
expect_equal(sort(tables), sort(tableSpecs))
# clear schema
.dropAllTablesFromSchema(connectionDetails, cdmDatabaseSchema)
}
})