# test to address a memory leak issue
# https://github.com/darwin-eu-dev/CDMConnector/issues/312
# fixed in v1.1.3
test_that("memory leak does not happen", {
  con <- DBI::dbConnect(duckdb::duckdb(), eunomia_dir())
  cdm <- cdm_from_con(con, "main", "main")

  conceptSet <- list(asthma = 317009)

  cdm <- generateConceptCohortSet(
    cdm = cdm,
    conceptSet = conceptSet,
    name = "asthma_1"
  )

  cdm <- generateConceptCohortSet(
    cdm = cdm,
    conceptSet = conceptSet,
    name = "asthma_2",
    overwrite = TRUE
  )
  # print(object.size(cdm), units = "MB")

  cdm <- generateConceptCohortSet(
    cdm = cdm,
    conceptSet = conceptSet,
    name = "asthma_3",
    overwrite = TRUE
  )
  # print(object.size(cdm), units = "MB")

  cdm <- generateConceptCohortSet(
    cdm = cdm,
    conceptSet = conceptSet,
    name = "asthma_4",
    overwrite = TRUE
  )
  # print(object.size(cdm), units = "MB")

  cdm <- generateConceptCohortSet(
    cdm = cdm,
    conceptSet = conceptSet,
    name = "asthma_5",
    overwrite = TRUE
  )

  # In the memory leak, each subsequent cohort table was much larger than the
  # previous one. This was a strange error; pryr::object_size() and
  # waldo::compare() can also be used to investigate.
  expect_equal(object.size(cdm$asthma_1), object.size(cdm$asthma_5))

  DBI::dbDisconnect(con, shutdown = TRUE)
})
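
# A minimal sketch (not run) of how the sizes could be inspected by hand when
# debugging this kind of leak, assuming the same `cdm` object built above:
#   pryr::object_size(cdm$asthma_1)
#   pryr::object_size(cdm$asthma_5)
#   waldo::compare(cdm$asthma_1, cdm$asthma_5)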