Resolve merge conflicts with dev
Merge branch 'dev' into jt-resolve_merge_conflicts_dev

# Conflicts:
#	DESCRIPTION
jthompson-arcus committed Nov 20, 2024
2 parents bd9d8e0 + 740a10a commit 993035f
Showing 34 changed files with 662 additions and 182 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -2,6 +2,7 @@
.Rhistory
.RData
.Ruserdata
.Renviron
*.html
*.tmp
~$*
2 changes: 1 addition & 1 deletion DESCRIPTION
@@ -1,6 +1,6 @@
Package: clinsight
Title: ClinSight
Version: 0.1.0.9008
Version: 0.1.0.9009
DevexVersion: 9000
Authors@R: c(
person("Leonard Daniël", "Samson", , "[email protected]", role = c("cre", "aut"),
15 changes: 13 additions & 2 deletions NEWS.md
@@ -2,16 +2,27 @@

## Changed

- Added `pkgdown` GHA workflow to automatically update the documentation site with PRs & pushes to `main` and `dev`
- Generalized `merge_meta_with_data()` to allow user-defined processing functions.
- Added a feature where, in applicable tables, a user can navigate to a form by double-clicking a table row.
- Fixed warnings in `apply_edc_specific_changes` due to the use of a vector within `dplyr::select`.
- Gave users the ability to re-organize the column order in any table.
- Added form type as a class to be used in `create_table()` to display tables.
- Added a logging table to the DB for reviews.
- Simplified pulling data from the DB for reviews.

## Bug fixes

- When using the `shinyproxy` deployment configuration, the user name is now expected to be base64 encoded, and will be base64 encoded by `clinsight` by default, so that the app can also handle non-ASCII characters in user names stored in HTTP headers. To display the user name correctly, use base64 encoding in the `application.yml` ShinyProxy settings (for example: `http-headers.X_SP_USERNAME: "#{T(java.util.Base64).getEncoder().encodeToString(oidcUser.getFullName().getBytes())}"`).

# clinsight 0.1.1

## Changed

- Added `pkgdown` GHA workflow to automatically update the documentation site with pushes to `main`

## Bug fixes

- Fixed inconsistencies in app messages when saving a review for a form with items with different review states (with some items reviewed previously by a different reviewer, and some items being completely new).
- Fixed a bug where clinsight deployed with `shinyproxy` would crash when a user with non-ASCII letters in their name attempted to log in. When using the `shinyproxy` deployment configuration, the user name is now expected to be base64 encoded, and will be base64 encoded by `clinsight` by default, so that the app can also handle non-ASCII characters in user names stored in HTTP headers. To display the user name correctly, use base64 encoding in the `application.yml` ShinyProxy settings (for example: `http-headers.X_SP_USERNAME: "#{T(java.util.Base64).getEncoder().encodeToString(oidcUser.getFullName().getBytes())}"`).
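
  A minimal sketch of the decoding step this implies (the helper name and header value are illustrative assumptions, not `clinsight`'s actual internals):

  ```r
  # Hypothetical helper (assumes the base64enc package): decode a
  # base64-encoded user name arriving in an HTTP header.
  decode_username <- function(encoded) {
    out <- rawToChar(base64enc::base64decode(encoded))
    Encoding(out) <- "UTF-8" # header bytes are assumed to be UTF-8
    out
  }
  decode_username("SsO8cmdlbiBNw7xsbGVy")
  #> [1] "Jürgen Müller"
  ```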

## `devex` changes
- Added `Excel` download button to Queries table & patient listings that need review.
4 changes: 2 additions & 2 deletions R/app_server.R
@@ -9,7 +9,7 @@
#' the `header widgets` ([mod_header_widgets_server()]), and the `query page`
#' ([mod_queries_server()])
#'
#' @param input,output,session Internal parameters for {shiny}.
#' @param input,output,session Internal parameters for `shiny`.
#' @seealso [app_ui()], [run_app()]
#'
app_server <- function(
@@ -58,7 +58,7 @@ app_server <- function(
)
# think of using the pool package, but functions such as row_update are not yet supported.
r <- reactiveValues(
review_data = db_slice_rows(user_db, db_table = "all_review_data"),
review_data = db_get_table(user_db, db_table = "all_review_data"),
query_data = collect_query_data(user_db),
filtered_subjects = app_vars$subject_id,
filtered_data = app_data,
263 changes: 248 additions & 15 deletions R/fct_SQLite.R
@@ -91,17 +91,131 @@ db_create <- function(
status = status
)

new_data <- list(
new_pk_data <- list(
"all_review_data" = df,
"query_data" = query_data_skeleton,
"db_synch_time" = data.frame(synch_time = data_synch_time)
"query_data" = query_data_skeleton
)
idx_pk_cols <- list(
all_review_data = idx_cols
)
other_data <- list(
"db_synch_time" = data.frame(synch_time = data_synch_time),
"db_version" = data.frame(version = db_version)
)
con <- get_db_connection(db_path)
for(i in names(new_data)){
db_add_tables(con, new_pk_data, idx_pk_cols, other_data)
cat("Finished writing to database\n\n")
}

#' Add new tables to DB
#'
#' @param con A DBI Connection to the SQLite DB
#' @param pk_data A named list of data frames to write to the DB with an added
#'   primary key field. Names correspond to the DB table names.
#' @param unique_cols A named list of character vectors naming the fields that
#'   define a unique record in a table. Names correspond to the tables the
#'   unique index constraints apply to.
#' @param other_data A named list of other data frames to add to the DB. Names
#' will correspond to the DB table names.
#'
#' @keywords internal
db_add_tables <- function(con, pk_data, unique_cols, other_data) {
for(i in names(pk_data)){
cat("\nCreating new table: ", i, "\n")
DBI::dbWriteTable(con, i, new_data[[i]])
db_add_primary_key(con, i, pk_data[[i]], unique_cols[[i]])
}
cat("Finished writing to database\n\n")
for(i in names(other_data)){
cat("\nCreating new table: ", i, "\n")
DBI::dbWriteTable(con, i, other_data[[i]])
}
cat("\nCreating log table: all_review_data_log\n")
db_add_log(con)
}
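
# Hedged usage sketch (not run): typical use of db_add_tables(). The data frame
# contents are hypothetical, and `idx_cols` is assumed to be the
# package-internal vector of index columns for all_review_data.
if (FALSE) {
  con <- DBI::dbConnect(RSQLite::SQLite(), ":memory:")
  review_df <- data.frame(
    subject_id = "S01", event_name = "Visit 1", item_group = "Vitals",
    form_repeat = 1L, item_name = "Weight", edit_date_time = "2024-01-01",
    reviewed = "No", comment = "", reviewer = "", timestamp = "", status = "new"
  )
  db_add_tables(
    con,
    pk_data = list(all_review_data = review_df),
    unique_cols = list(all_review_data = idx_cols),
    other_data = list(db_version = data.frame(version = "1.1"))
  )
}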

#' Add primary key field
#'
#' @param con A DBI Connection to the SQLite DB
#' @param name The table name
#' @param value A data.frame to add to the table
#' @param keys A character vector specifying which columns define a unique row
#' for the table. If `NULL`, no unique index will be created.
#'
#' @keywords internal
db_add_primary_key <- function(con, name, value, keys = NULL) {
fields <- c(id = "INTEGER PRIMARY KEY AUTOINCREMENT", DBI::dbDataType(con, value))
DBI::dbCreateTable(con, name, fields)
if (!is.null(keys)) {
all_keys <- paste(keys, collapse = ", ")
rs <- DBI::dbSendStatement(
con,
sprintf("CREATE UNIQUE INDEX idx_%1$s ON %1$s (%2$s)", name, all_keys)
)
DBI::dbClearResult(rs)
}
DBI::dbAppendTable(con, name, value)
}
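
# Hedged illustration (not run): the unique index statement assembled above,
# expanded for a hypothetical table and key pair:
if (FALSE) {
  sprintf(
    "CREATE UNIQUE INDEX idx_%1$s ON %1$s (%2$s)",
    "all_review_data", "subject_id, item_name"
  )
  #> "CREATE UNIQUE INDEX idx_all_review_data ON all_review_data (subject_id, item_name)"
}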

#' Add Logging Table
#'
#' Creates both the logging table for all_review_data and the triggers that
#' guard its key fields and record row updates.
#'
#' @param con A DBI Connection to the SQLite DB
#' @param keys A character vector specifying which columns should not be updated
#' in a table. Defaults to 'id' and the package-defined index columns
#' (`idx_cols`).
#'
#' @keywords internal
db_add_log <- function(con, keys = c("id", idx_cols)) {
stopifnot(is.character(keys))
all_keys <- paste(keys, collapse = ", ")
stopifnot("'keys' parameter cannot be empty" = nchar(all_keys) > 0)

DBI::dbCreateTable(
con,
"all_review_data_log",
c(
id = "INTEGER PRIMARY KEY AUTOINCREMENT",
review_id = "INTEGER NOT NULL",
edit_date_time = "CHAR",
reviewed = "CHAR",
comment = "CHAR",
reviewer = "CHAR",
timestamp = "CHAR",
status = "CHAR",
dml_type = "CHAR NOT NULL",
dml_timestamp = "DATETIME DEFAULT CURRENT_TIMESTAMP"
)
)
# This trigger fires before any UPDATE of the key fields on all_review_data.
# Instead of allowing them to be updated, it throws an error.
rs <- DBI::dbSendStatement(con, paste(
"CREATE TRIGGER all_review_data_id_update_trigger",
sprintf("BEFORE UPDATE OF %s ON all_review_data", all_keys),
"BEGIN",
sprintf("SELECT RAISE(FAIL, 'Fields %s are read only');", all_keys),
"END"
))
DBI::dbClearResult(rs)
rs <- DBI::dbSendStatement(con, paste(
"CREATE TRIGGER all_review_data_update_log_trigger",
"AFTER UPDATE ON all_review_data FOR EACH ROW",
"BEGIN",
"INSERT INTO all_review_data_log (",
"review_id, edit_date_time, reviewed, comment, reviewer, timestamp, status, dml_type",
")",
"VALUES(",
"NEW.id,",
"OLD.edit_date_time,",
"OLD.reviewed,",
"OLD.comment,",
"OLD.reviewer,",
"OLD.timestamp,",
"OLD.status,",
"'UPDATE'",
");",
"END"
))
DBI::dbClearResult(rs)
}
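
# Hedged illustration (not run): once db_add_log() has run, updating any of
# the guarded key fields trips the first trigger and SQLite rejects the
# statement:
if (FALSE) {
  DBI::dbExecute(con, "UPDATE all_review_data SET id = 99 WHERE id = 1")
  #> Error: Fields id, ... are read only
}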

#' Update app database
@@ -153,7 +267,7 @@ db_update <- function(
update_time = data_synch_time
)
cat("writing updated review data to database...\n")
DBI::dbWriteTable(con, "all_review_data", updated_review_data, append = TRUE)
db_upsert(con, updated_review_data, common_vars)
DBI::dbWriteTable(
con,
"db_synch_time",
@@ -163,6 +277,41 @@
cat("Finished updating review data\n")
}

#' UPSERT to all_review_data
#'
#' Performs an UPSERT on all_review_data. New records are appended to the
#' table, while changed records update the existing rows matched by the
#' unique index constraint.
#'
#' @param con A DBI Connection to the SQLite DB
#' @param data A data frame containing the data to UPSERT into all_review_data
#' @param idx_cols A character vector specifying which columns define a
#' unique index for a row
#'
#' @return Invisibly returns `TRUE`. Run for its side effects on the DB.
#'
#' @keywords internal
db_upsert <- function(con, data, idx_cols) {
if ("id" %in% names(data))
data$id <- NULL
cols_to_update <- names(data)[!names(data) %in% idx_cols]
cols_to_insert <- names(data) |>
paste(collapse = ", ")
constraint_cols <- paste(idx_cols, collapse = ", ")
dplyr::copy_to(con, data, "row_updates")
rs <- DBI::dbSendStatement(con, paste(
"INSERT INTO",
"all_review_data",
sprintf("(%s)", cols_to_insert),
sprintf("SELECT %s FROM row_updates WHERE true", cols_to_insert),
"ON CONFLICT",
sprintf("(%s)", constraint_cols),
"DO UPDATE SET",
sprintf("%1$s = excluded.%1$s", cols_to_update) |> paste(collapse = ", ")
))
DBI::dbClearResult(rs)
}
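
# Hedged illustration (not run): the UPSERT statement db_upsert() assembles,
# expanded for hypothetical columns. The `WHERE true` clause resolves SQLite's
# parsing ambiguity between a join and the ON CONFLICT clause when combining
# INSERT ... SELECT with an UPSERT.
if (FALSE) {
  cat(
    "INSERT INTO all_review_data (subject_id, item_name, reviewed, comment)",
    "SELECT subject_id, item_name, reviewed, comment FROM row_updates WHERE true",
    "ON CONFLICT (subject_id, item_name)",
    "DO UPDATE SET reviewed = excluded.reviewed, comment = excluded.comment",
    sep = "\n"
  )
}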


#' Save review in database
#'
@@ -175,7 +324,6 @@ db_update <- function(
#' @param db_path Character vector. Path to the database.
#' @param tables Character vector. Names of the tables within the database to
#' save the review in.
#' @param common_vars A character vector containing the common key variables.
#' @param review_by A character vector, containing the key variables to perform
#' the review on. For example, the review can be performed on form level
#' (writing the same review to all items in a form), or on item level, with a
Expand All @@ -189,8 +337,6 @@ db_save_review <- function(
rv_row,
db_path,
tables = c("all_review_data"),
common_vars = c("subject_id", "event_name", "item_group",
"form_repeat", "item_name"),
review_by = c("subject_id", "item_group")
){
stopifnot(is.data.frame(rv_row))
@@ -214,14 +360,21 @@
warning("Review state unaltered. No review will be saved.")
)}
new_review_rows <- new_review_rows |>
db_slice_rows(slice_vars = c("timestamp", "edit_date_time"), group_vars = common_vars) |>
dplyr::select(-dplyr::all_of(cols_to_change)) |>
# If there are multiple edits, make sure to only select the latest edit_date_time for all items:
# dplyr::slice_max(edit_date_time, by = dplyr::all_of(common_vars)) |>
dplyr::bind_cols(rv_row[cols_to_change]) # bind_cols does not work in a db connection.
cat("write updated review data to database\n")
lapply(tables, \(x){DBI::dbWriteTable(db_con, x, new_review_rows, append = TRUE)}) |>
invisible()
dplyr::copy_to(db_con, new_review_rows, "row_updates")
rs <- DBI::dbSendStatement(db_con, paste(
"UPDATE",
tables,
"SET",
sprintf("%1$s = row_updates.%1$s", cols_to_change) |> paste(collapse = ", "),
"FROM",
"row_updates",
"WHERE",
sprintf("%s.id = row_updates.id", tables)
))
DBI::dbClearResult(rs)
cat("finished writing to the tables:", tables, "\n")
}
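
# Hedged illustration (not run): the UPDATE ... FROM statement assembled
# above, expanded for hypothetical review columns. Note that UPDATE ... FROM
# requires SQLite >= 3.33.0.
if (FALSE) {
  cat(
    "UPDATE all_review_data",
    "SET reviewed = row_updates.reviewed, comment = row_updates.comment",
    "FROM row_updates",
    "WHERE all_review_data.id = row_updates.id",
    sep = "\n"
  )
}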

@@ -369,3 +522,83 @@ db_get_review <- function(
dplyr::as_tibble()
})
}

db_get_version <- function(db_path) {
stopifnot(file.exists(db_path))
con <- get_db_connection(db_path)
tryCatch({
DBI::dbGetQuery(con, "SELECT version FROM db_version") |>
unlist(use.names = FALSE)
},
error = \(e) {""}
)
}

update_db_version <- function(db_path, version = "1.1") {
stopifnot(file.exists(db_path))
version <- match.arg(version)
temp_path <- withr::local_tempfile(fileext = ".sqlite")
file.copy(db_path, temp_path)
con <- get_db_connection(temp_path)

current_version <- tryCatch({
DBI::dbGetQuery(con, "SELECT version FROM db_version") |>
unlist(use.names = FALSE)}, error = \(e){""})
if(identical(current_version, db_version)) return("Database up to date. No update needed")

review_skeleton <- DBI::dbGetQuery(con, "SELECT * FROM all_review_data LIMIT 0")
rs <- DBI::dbSendQuery(con, "ALTER TABLE all_review_data RENAME TO all_review_data_old")
DBI::dbClearResult(rs)
rs <- DBI::dbSendQuery(con, "ALTER TABLE query_data RENAME TO query_data_old")
DBI::dbClearResult(rs)

new_pk_data <- list(
"all_review_data" = review_skeleton,
"query_data" = query_data_skeleton
)
idx_pk_cols <- list(
all_review_data = idx_cols
)
other_data <- list(
"db_version" = data.frame(version = db_version)
)
db_add_tables(con, new_pk_data, idx_pk_cols, other_data)

query_cols <- paste(names(query_data_skeleton), collapse = ", ")
cat("\nInserting old query records into new table.\n")
rs <- DBI::dbSendStatement(con, sprintf("INSERT INTO query_data (%1$s) SELECT %1$s FROM query_data_old", query_cols))
DBI::dbClearResult(rs)

stopifnot(DBI::dbGetQuery(con, "SELECT COUNT(*) FROM query_data") ==
DBI::dbGetQuery(con, "SELECT COUNT(*) FROM query_data_old"))

rs <- DBI::dbSendStatement(con, "DROP TABLE query_data_old")
DBI::dbClearResult(rs)

cat("\nInserting old review records into new tables.\n")
cols_to_update <- names(review_skeleton)[!names(review_skeleton) %in% idx_pk_cols$all_review_data]
cols_to_insert <- names(review_skeleton) |>
paste(collapse = ", ")
upsert_statement <- paste(
"INSERT INTO",
"all_review_data",
sprintf("(%s)", cols_to_insert),
sprintf("SELECT %s FROM all_review_data_old WHERE true", cols_to_insert),
"ON CONFLICT",
sprintf("(%s)", paste(idx_pk_cols$all_review_data, collapse = ", ")),
"DO UPDATE SET",
sprintf("%1$s = excluded.%1$s", cols_to_update) |> paste(collapse = ", ")
)
rs <- DBI::dbSendStatement(con, upsert_statement)
DBI::dbClearResult(rs)

stopifnot(DBI::dbGetQuery(con, "SELECT COUNT(*) FROM all_review_data") +
DBI::dbGetQuery(con, "SELECT COUNT(*) FROM all_review_data_log") ==
DBI::dbGetQuery(con, "SELECT COUNT(*) FROM all_review_data_old"))

rs <- DBI::dbSendStatement(con, "DROP TABLE all_review_data_old")
DBI::dbClearResult(rs)

file.copy(temp_path, db_path, overwrite = TRUE)
cat("Finished updating to new database standard\n\n")
}
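
# Hedged usage sketch (not run): check the stored schema version and migrate
# an existing database in place. The path is hypothetical, and "1.1" is
# assumed to match the package-internal `db_version`.
if (FALSE) {
  db_path <- "clinsight_user_db.sqlite"
  if (!identical(db_get_version(db_path), "1.1")) {
    update_db_version(db_path, version = "1.1")
  }
}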
2 changes: 1 addition & 1 deletion R/fct_data_helpers.R
Expand Up @@ -391,7 +391,7 @@ get_form_level_data <- function(
#' @param value_column A string containing the column name with the item values.
#' @param id_column The columns identifying a unique participant (subject_id).
#'
#' @return a data frame with an additional column named "base_{varname}".
#' @return a data frame with an additional column named "base_`varname`".
#' @export
#' @examples
#' library(dplyr)