# ==================================================================== #
# TITLE:                                                               #
# AMR: An R Package for Working with Antimicrobial Resistance Data     #
#                                                                      #
# SOURCE CODE:                                                         #
# https://github.com/msberends/AMR                                     #
#                                                                      #
# PLEASE CITE THIS SOFTWARE AS:                                        #
# Berends MS, Luz CF, Friedrich AW, et al. (2022).                     #
# AMR: An R Package for Working with Antimicrobial Resistance Data.    #
# Journal of Statistical Software, 104(3), 1-31.                       #
# https://doi.org/10.18637/jss.v104.i03                                #
#                                                                      #
# Developed at the University of Groningen and the University Medical  #
# Center Groningen in The Netherlands, in collaboration with many      #
# colleagues from around the world, see our website.                   #
#                                                                      #
# This R package is free software; you can freely use and distribute   #
# it for both personal and commercial purposes under the terms of the  #
# GNU General Public License version 2.0 (GNU GPL-2), as published by  #
# the Free Software Foundation.                                        #
# We created this package for both routine data analysis and academic  #
# research and it was publicly released in the hope that it will be    #
# useful, but it comes WITHOUT ANY WARRANTY OR LIABILITY.              #
#                                                                      #
# Visit our website for the full manual and a complete tutorial about  #
# how to conduct AMR data analysis: https://msberends.github.io/AMR/   #
# ==================================================================== #

#' Predict Antimicrobial Resistance
#'
#' Create a prediction model to predict antimicrobial resistance for the coming years on statistically solid ground. Standard errors (SE) will be returned as columns `se_min` and `se_max`. See *Examples* for a real-life example.
#' @param object model data to be plotted
#' @param col_ab column name of `x` containing antimicrobial interpretations (`"R"`, `"I"` and `"S"`)
#' @param col_date column name of the date; it will be used to calculate years if this column doesn't consist of years already - the default is the first column with a date class
#' @param year_min lowest year to use in the prediction model, defaults to the lowest year in `col_date`
#' @param year_max highest year to use in the prediction model - the default is 10 years after today
#' @param year_every unit of sequence between the lowest year found in the data and `year_max`
#' @param minimum minimum number of available isolates per year to include. Years containing fewer observations will be estimated by the model.
#' @param model the statistical model of choice. This could be a generalised linear regression model with binomial distribution (i.e. using `glm(..., family = binomial)`), assuming that a period of zero resistance was followed by a period of increasing resistance that slowly leads to more and more resistance. See *Details* for all valid options.
#' @param I_as_S a [logical] to indicate whether values `"I"` should be treated as `"S"` (will otherwise be treated as `"R"`). The default, `TRUE`, follows the redefinition by EUCAST about the interpretation of I (increased exposure) in 2019, see section *Interpretation of S, I and R* below.
#' @param preserve_measurements a [logical] to indicate whether predictions of years that are actually available in the data should be overwritten by the original data. The standard errors of those years will be `NA`.
#' @param info a [logical] to indicate whether textual analysis should be printed with the name and [summary()] of the statistical model.
#' @param main title of the plot
#' @param ribbon a [logical] to indicate whether a ribbon should be shown (default) or error bars
#' @param ... arguments passed on to functions
#' @inheritSection as.sir Interpretation of SIR
#' @inheritParams first_isolate
#' @inheritParams graphics::plot
#' @details Valid options for the statistical model (argument `model`) are:
#' - `"binomial"` or `"binom"` or `"logit"`: a generalised linear regression model with binomial distribution
#' - `"loglin"` or `"poisson"`: a generalised log-linear regression model with poisson distribution
#' - `"lin"` or `"linear"`: a linear regression model
#' @return A [data.frame] with extra class [`resistance_predict`] with columns:
#' - `year`
#' - `value`, the same as `estimated` when `preserve_measurements = FALSE`, and a combination of `observed` and `estimated` otherwise
#' - `se_min`, the lower bound of the standard error with a minimum of `0` (so the standard error will never go below 0%)
#' - `se_max`, the upper bound of the standard error with a maximum of `1` (so the standard error will never go above 100%)
#' - `observations`, the total number of available observations in that year, i.e. \eqn{S + I + R}
#' - `observed`, the original observed resistant percentages
#' - `estimated`, the estimated resistant percentages, calculated by the model
#'
#' Furthermore, the model itself is available as an attribute: `attributes(x)$model`, see *Examples*.
#' @seealso The [proportion()] functions to calculate resistance
#'
#' Models: [lm()] [glm()]
#' @rdname resistance_predict
#' @export
#' @importFrom stats predict glm lm
#' @examples
#' x <- resistance_predict(example_isolates,
#'   col_ab = "AMX",
#'   year_min = 2010,
#'   model = "binomial"
#' )
#' plot(x)
#' \donttest{
#' if (require("ggplot2")) {
#'   ggplot_sir_predict(x)
#' }
#'
#' # using dplyr:
#' if (require("dplyr")) {
#'   x <- example_isolates %>%
#'     filter_first_isolate() %>%
#'     filter(mo_genus(mo) == "Staphylococcus") %>%
#'     resistance_predict("PEN", model = "binomial")
#'   print(plot(x))
#'
#'   # get the model from the object
#'   mymodel <- attributes(x)$model
#'   summary(mymodel)
#' }
#'
#' # create nice plots with ggplot2 yourself
#' if (require("dplyr") && require("ggplot2")) {
#'   data <- example_isolates %>%
#'     filter(mo == as.mo("E. coli")) %>%
#'     resistance_predict(
#'       col_ab = "AMX",
#'       col_date = "date",
#'       model = "binomial",
#'       info = FALSE,
#'       minimum = 15
#'     )
#'   head(data)
#'   autoplot(data)
#' }
#' }
resistance_predict <- function(x,
                               col_ab,
                               col_date = NULL,
                               year_min = NULL,
                               year_max = NULL,
                               year_every = 1,
                               minimum = 30,
                               model = NULL,
                               I_as_S = TRUE,
                               preserve_measurements = TRUE,
                               info = interactive(),
                               ...) {
  meet_criteria(x, allow_class = "data.frame")
  meet_criteria(col_ab, allow_class = "character", has_length = 1, is_in = colnames(x))
  meet_criteria(col_date, allow_class = "character", has_length = 1, is_in = colnames(x), allow_NULL = TRUE)
  meet_criteria(year_min, allow_class = c("numeric", "integer"), has_length = 1, allow_NULL = TRUE, is_positive = TRUE, is_finite = TRUE)
  meet_criteria(year_max, allow_class = c("numeric", "integer"), has_length = 1, allow_NULL = TRUE, is_positive = TRUE, is_finite = TRUE)
  meet_criteria(year_every, allow_class = c("numeric", "integer"), has_length = 1, is_positive = TRUE, is_finite = TRUE)
  meet_criteria(minimum, allow_class = c("numeric", "integer"), has_length = 1, is_positive_or_zero = TRUE, is_finite = TRUE)
  meet_criteria(model, allow_class = c("character", "function"), has_length = 1, allow_NULL = TRUE)
  meet_criteria(I_as_S, allow_class = "logical", has_length = 1)
  meet_criteria(preserve_measurements, allow_class = "logical", has_length = 1)
  meet_criteria(info, allow_class = "logical", has_length = 1)

  stop_if(is.null(model), 'choose a regression model with the `model` argument, e.g. resistance_predict(..., model = "binomial")')

  x.bak <- x
  x <- as.data.frame(x, stringsAsFactors = FALSE)

  # -- date
  if (is.null(col_date)) {
    col_date <- search_type_in_df(x = x, type = "date")
    stop_if(is.null(col_date), "`col_date` must be set")
  }
  stop_ifnot(
    col_date %in% colnames(x),
    "column '", col_date, "' not found"
  )
  year <- function(x) {
    # don't depend on lubridate or so, would be overkill for only this function
    if (all(grepl("^[0-9]{4}$", x))) {
      as.integer(x)
    } else {
      as.integer(format(as.Date(x), "%Y"))
    }
  }

  df <- x
  df[, col_ab] <- droplevels(as.sir(df[, col_ab, drop = TRUE]))
  if (I_as_S == TRUE) {
    # then I as S
    df[, col_ab] <- gsub("I", "S", df[, col_ab, drop = TRUE], fixed = TRUE)
  } else {
    # then I as R
    df[, col_ab] <- gsub("I", "R", df[, col_ab, drop = TRUE], fixed = TRUE)
  }
  df[, col_ab] <- ifelse(is.na(df[, col_ab, drop = TRUE]), 0, df[, col_ab, drop = TRUE])

  # remove rows with NAs
  df <- subset(df, !is.na(df[, col_ab, drop = TRUE]))
  df$year <- year(df[, col_date, drop = TRUE])
  df <- as.data.frame(rbind(table(df[, c("year", col_ab), drop = FALSE])),
    stringsAsFactors = FALSE
  )
  df$year <- as.integer(rownames(df))
  rownames(df) <- NULL

  # keep only years with at least `minimum` isolates; years with fewer will be estimated by the model
  df <- subset(df, df$R + df$S >= minimum)
  # nolint start
  df_matrix <- as.matrix(df[, c("R", "S"), drop = FALSE])
  # nolint end

  stop_if(NROW(df) == 0, "there are no observations")

  year_lowest <- min(df$year)
  if (is.null(year_min)) {
    year_min <- year_lowest
  } else {
    year_min <- max(year_min, year_lowest, na.rm = TRUE)
  }
  if (is.null(year_max)) {
    year_max <- year(Sys.Date()) + 10
  }

  years <- list(year = seq(from = year_min, to = year_max, by = year_every))

  if (model %in% c("binomial", "binom", "logit")) {
    model <- "binomial"
    model_lm <- with(df, glm(df_matrix ~ year, family = binomial))
    if (isTRUE(info)) {
      cat("\nLogistic regression model (logit) with binomial distribution")
      cat("\n------------------------------------------------------------\n")
      print(summary(model_lm))
    }

    predictmodel <- predict(model_lm, newdata = years, type = "response", se.fit = TRUE)
    prediction <- predictmodel$fit
    se <- predictmodel$se.fit
  } else if (model %in% c("loglin", "poisson")) {
    model <- "poisson"
    model_lm <- with(df, glm(R ~ year, family = poisson))
    if (isTRUE(info)) {
      cat("\nLog-linear regression model (loglin) with poisson distribution")
      cat("\n--------------------------------------------------------------\n")
      print(summary(model_lm))
    }

    predictmodel <- predict(model_lm, newdata = years, type = "response", se.fit = TRUE)
    prediction <- predictmodel$fit
    se <- predictmodel$se.fit
  } else if (model %in% c("lin", "linear")) {
    model <- "linear"
    model_lm <- with(df, lm((R / (R + S)) ~ year))
    if (isTRUE(info)) {
      cat("\nLinear regression model")
      cat("\n-----------------------\n")
      print(summary(model_lm))
    }

    predictmodel <- predict(model_lm, newdata = years, se.fit = TRUE)
    prediction <- predictmodel$fit
    se <- predictmodel$se.fit
  } else {
    stop("no valid model selected. See `?resistance_predict`.")
  }

  # prepare the output dataframe
  df_prediction <- data.frame(
    year = unlist(years),
    value = prediction,
    se_min = prediction - se,
    se_max = prediction + se,
    stringsAsFactors = FALSE
  )
  if (model == "poisson") {
    df_prediction$value <- as.integer(format(df_prediction$value, scientific = FALSE))
    df_prediction$se_min <- as.integer(df_prediction$se_min)
    df_prediction$se_max <- as.integer(df_prediction$se_max)
  } else {
    # se_max not above 1
    df_prediction$se_max <- pmin(df_prediction$se_max, 1)
  }
  # se_min not below 0
  df_prediction$se_min <- pmax(df_prediction$se_min, 0)

  df_observations <- data.frame(
    year = df$year,
    observations = df$R + df$S,
    observed = df$R / (df$R + df$S),
    stringsAsFactors = FALSE
  )
  df_prediction <- df_prediction %pm>%
    pm_left_join(df_observations, by = "year")
  df_prediction$estimated <- df_prediction$value

  if (preserve_measurements == TRUE) {
    # replace estimated data by observed data
    df_prediction$value <- ifelse(!is.na(df_prediction$observed), df_prediction$observed, df_prediction$value)
    df_prediction$se_min <- ifelse(!is.na(df_prediction$observed), NA, df_prediction$se_min)
    df_prediction$se_max <- ifelse(!is.na(df_prediction$observed), NA, df_prediction$se_max)
  }

  df_prediction$value <- ifelse(df_prediction$value > 1, 1, pmax(df_prediction$value, 0))
  df_prediction <- df_prediction[order(df_prediction$year), , drop = FALSE]

  out <- as_original_data_class(df_prediction, class(x.bak)) # will remove tibble groups
  structure(out,
    class = c("resistance_predict", class(out)),
    I_as_S = I_as_S,
    model_title = model,
    model = model_lm,
    ab = col_ab
  )
}
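
# A minimal standalone sketch of the binomial branch above (illustration only,
# not part of the package API), assuming a hypothetical data frame
# `yearly_counts` with one row per year and integer columns `R` and `S`:
if (FALSE) {
  fit <- glm(cbind(R, S) ~ year, family = binomial, data = yearly_counts)
  new_years <- data.frame(year = seq(min(yearly_counts$year), max(yearly_counts$year) + 10))
  pred <- predict(fit, newdata = new_years, type = "response", se.fit = TRUE)
  # same bounding as in resistance_predict(): keep the error band within [0, 1]
  data.frame(
    year = new_years$year,
    value = pred$fit,
    se_min = pmax(pred$fit - pred$se.fit, 0),
    se_max = pmin(pred$fit + pred$se.fit, 1)
  )
}
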
#' @rdname resistance_predict
#' @export
sir_predict <- resistance_predict
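
# A short usage sketch (illustration only): the fitted model is stored as an
# attribute on the returned object, so standard R generics can be applied to
# it directly. This assumes the binomial model and the package's own
# `example_isolates` data set, as in the examples above.
if (FALSE) {
  x <- resistance_predict(example_isolates, col_ab = "AMX", model = "binomial")
  fit <- attributes(x)$model
  coef(summary(fit)) # estimated yearly change on the logit scale
  exp(coef(fit)[["year"]]) # approximate odds ratio of resistance per year
}
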
#' @method plot resistance_predict
#' @export
#' @importFrom graphics plot axis arrows points
#' @rdname resistance_predict
plot.resistance_predict <- function(x, main = paste("Resistance Prediction of", x_name), ...) {
  x_name <- paste0(ab_name(attributes(x)$ab), " (", attributes(x)$ab, ")")
  meet_criteria(main, allow_class = "character", has_length = 1)

  if (attributes(x)$I_as_S == TRUE) {
    ylab <- "%R"
  } else {
    ylab <- "%IR"
  }
  plot(
    x = x$year,
    y = x$value,
    ylim = c(0, 1),
    yaxt = "n", # no y labels
    pch = 19, # closed dots
    ylab = paste0("Percentage (", ylab, ")"),
    xlab = "Year",
    main = main,
    sub = paste0(
      "(n = ", sum(x$observations, na.rm = TRUE),
      ", model: ", attributes(x)$model_title, ")"
    ),
    cex.sub = 0.75
  )
  axis(side = 2, at = seq(0, 1, 0.1), labels = paste0(0:10 * 10, "%"))

  # hack for error bars: https://stackoverflow.com/a/22037078/4575331
  arrows(
    x0 = x$year,
    y0 = x$se_min,
    x1 = x$year,
    y1 = x$se_max,
    length = 0.05, angle = 90, code = 3, lwd = 1.5
  )

  # overlay grey points for prediction
  points(
    x = subset(x, is.na(observations))$year,
    y = subset(x, is.na(observations))$value,
    pch = 19,
    col = "grey40"
  )
}
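
# A minimal sketch of the arrows() error-bar idiom used above (illustration
# only), assuming hypothetical vectors `yr`, `lo` and `hi`: an arrow with flat
# (90-degree) heads drawn at both ends renders as a vertical error bar.
if (FALSE) {
  plot(yr, (lo + hi) / 2, ylim = c(0, 1), pch = 19)
  arrows(x0 = yr, y0 = lo, x1 = yr, y1 = hi, length = 0.05, angle = 90, code = 3)
}
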
#' @rdname resistance_predict
#' @export
ggplot_sir_predict <- function(x,
                               main = paste("Resistance Prediction of", x_name),
                               ribbon = TRUE,
                               ...) {
  x_name <- paste0(ab_name(attributes(x)$ab), " (", attributes(x)$ab, ")")
  meet_criteria(main, allow_class = "character", has_length = 1)
  meet_criteria(ribbon, allow_class = "logical", has_length = 1)
  stop_ifnot_installed("ggplot2")
  stop_ifnot(inherits(x, "resistance_predict"), "`x` must be a resistance prediction model created with resistance_predict()")

  if (attributes(x)$I_as_S == TRUE) {
    ylab <- "%R"
  } else {
    ylab <- "%IR"
  }
  p <- ggplot2::ggplot(
    as.data.frame(x, stringsAsFactors = FALSE),
    ggplot2::aes(x = year, y = value)
  ) +
    ggplot2::geom_point(
      data = subset(x, !is.na(observations)),
      size = 2
    ) +
    scale_y_percent(limits = c(0, 1)) +
    ggplot2::labs(
      title = main,
      y = paste0("Percentage (", ylab, ")"),
      x = "Year",
      caption = paste0(
        "(n = ", sum(x$observations, na.rm = TRUE),
        ", model: ", attributes(x)$model_title, ")"
      )
    )
  if (ribbon == TRUE) {
    p <- p + ggplot2::geom_ribbon(ggplot2::aes(ymin = se_min, ymax = se_max), alpha = 0.25)
  } else {
    p <- p + ggplot2::geom_errorbar(ggplot2::aes(ymin = se_min, ymax = se_max), na.rm = TRUE, width = 0.5)
  }
  p <- p +
    # overlay grey points for prediction
    ggplot2::geom_point(
      data = subset(x, is.na(observations)),
      size = 2,
      colour = "grey40"
    )
  p
}
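
# Usage sketch (illustration only, assuming the binomial model as in the
# examples above): `ribbon` switches between a shaded standard-error band and
# discrete error bars, both built from the se_min/se_max columns.
if (FALSE) {
  x <- resistance_predict(example_isolates, col_ab = "AMX", model = "binomial")
  ggplot_sir_predict(x, ribbon = TRUE) # shaded band
  ggplot_sir_predict(x, ribbon = FALSE) # error bars per predicted year
}
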
#' @method autoplot resistance_predict
#' @rdname resistance_predict
# will be exported using s3_register() in R/zzz.R
autoplot.resistance_predict <- function(object,
                                        main = paste("Resistance Prediction of", x_name),
                                        ribbon = TRUE,
                                        ...) {
  x_name <- paste0(ab_name(attributes(object)$ab), " (", attributes(object)$ab, ")")
  meet_criteria(main, allow_class = "character", has_length = 1)
  meet_criteria(ribbon, allow_class = "logical", has_length = 1)
  ggplot_sir_predict(x = object, main = main, ribbon = ribbon, ...)
}
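
# A sketch of the delayed S3 registration that the comment above refers to
# (illustration only; the actual code lives in R/zzz.R of this package). It
# follows the common vctrs-style s3_register() pattern for methods of a
# generic that lives in a suggested package:
if (FALSE) {
  .onLoad <- function(libname, pkgname) {
    s3_register("ggplot2::autoplot", "resistance_predict")
    s3_register("ggplot2::fortify", "resistance_predict")
  }
}
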
#' @method fortify resistance_predict
#' @noRd
# will be exported using s3_register() in R/zzz.R
fortify.resistance_predict <- function(model, data, ...) {
  as.data.frame(model)
}
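
# A sketch (illustration only) of what fortify() enables: because the object is
# coerced to a plain data.frame, it can also be passed straight to ggplot2 and
# plotted manually from its year/value/se_min/se_max columns.
if (FALSE) {
  library(ggplot2)
  x <- resistance_predict(example_isolates, col_ab = "AMX", model = "binomial")
  ggplot(x, aes(x = year, y = value)) +
    geom_ribbon(aes(ymin = se_min, ymax = se_max), alpha = 0.25) +
    geom_line() +
    geom_point(data = subset(x, !is.na(observations)))
}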