#-----------------------------------------------------------------------
# Packages.

rm(list = objects())

library(tidyverse)
library(readxl)

1 Import and organization

#-----------------------------------------------------------------------
# Data import.

# Import directly from the `*.xlsx` file.
tb1 <- read_excel("Cmel_nym_ipro.xlsx", sheet = 1)
tb2 <- read_excel("Cmel_nym_ipro.xlsx", sheet = 2)
tb <- bind_rows(tb1, tb2)
rm(tb1, tb2)
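
# Sketch (not run): if more sheets are added to the workbook, all of them
# could be read and stacked in one step; assumes every sheet has the same
# layout as the two used above.
# tb <- map_dfr(readxl::excel_sheets("Cmel_nym_ipro.xlsx"),
#               ~read_excel("Cmel_nym_ipro.xlsx", sheet = .x))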

# Compute the mean diameter. Assumes the perpendicular diameters come in
# pairs, as guaranteed by Jhulia.
tb$eixo <- rep(c("a", "b"), times = nrow(tb)/2)
tb$ue <- rep(rep(1:4, each = 2), times = nrow(tb)/8)
tb <- tb %>%
    spread(key = "eixo", value = "growth")

# The plot makes sense: the two perpendicular diameters agree closely.
plot(a ~ b, data = tb, asp = 1)
abline(a = 0, b = 1, col = "red")
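
# Sketch (not run): a quick numeric check of the agreement between the two
# perpendicular diameters shown in the plot above.
# with(tb, cor(a, b))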

# Compute the mean growth from the two axes.
tb <- tb %>%
    mutate(growth = (a + b)/2,
           a = NULL,
           b = NULL)
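
# Sketch (not run): spread() is superseded in current tidyr; the same reshape
# and averaging could be written with pivot_wider(), assuming the column
# names created above.
# tb %>%
#     pivot_wider(names_from = "eixo", values_from = "growth") %>%
#     mutate(growth = (a + b)/2, a = NULL, b = NULL)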

# Table structure.
str(tb)
## tibble[,5] [1,440 × 5] (S3: tbl_df/tbl/data.frame)
##  $ isolate: chr [1:1440] "col006" "col006" "col006" "col006" ...
##  $ species: chr [1:1440] "C. melonis" "C. melonis" "C. melonis" "C. melonis" ...
##  $ dose   : num [1:1440] 0 0 0 0 1 1 1 1 3 3 ...
##  $ ue     : int [1:1440] 1 2 3 4 1 2 3 4 1 2 ...
##  $ growth : num [1:1440] 35.1 32.1 32.2 30.2 31.4 ...
# Convert to factor.
tb <- tb %>%
    mutate_at(c("isolate", "species"), "factor")

# Frequency table of the experimental points.
xtabs(~species, data = tb)
## species
##   C. melonis C. nymphaeae 
##          720          720
xtabs(~species + dose, data = tb)
##               dose
## species          0   1   3  10  30 100
##   C. melonis   120 120 120 120 120 120
##   C. nymphaeae 120 120 120 120 120 120
xtabs(~isolate, data = tb)
## isolate
## col006 col009 col012 col014 col020 col021 col027 col028 col029 col032 
##     24     24     24     24     24     24     24     24     24     24 
## col034 col038 col039 col041 col042 col047 col049 col052 col054 col056 
##     24     24     24     24     24     24     24     24     24     24 
## col059 col061 col064 col066 col073 col077 col083 col089 col091 col093 
##     24     24     24     24     24     24     24     24     24     24 
## col094 col101 col106 col107 col108 col112 col113 col114 col115 col117 
##     24     24     24     24     24     24     24     24     24     24 
## col119 col120 col121 col122 col123 col124 col126 col127 col128 col129 
##     24     24     24     24     24     24     24     24     24     24 
## col131 col133 col134 col135 col136 col137 col139 col140 col142 col144 
##     24     24     24     24     24     24     24     24     24     24
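
# Sketch (not run): the same frequencies in long format with dplyr.
# count(tb, species, dose)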

2 Exploratory analysis

#-----------------------------------------------------------------------
# Exploratory analysis.

ggplot(data = tb,
       mapping = aes(x = dose + 1, y = growth)) +
    facet_wrap(facets = ~species) +
    scale_x_log10() +
    geom_point()

# Smooth curves from a degree-4 polynomial (cyan) and natural splines (orange).
ggplot(data = tb,
       mapping = aes(x = dose + 1, y = growth, color = species)) +
    facet_wrap(facets = ~isolate) +
    scale_x_log10() +
    # geom_point() +
    geom_jitter(width = 0.05) +
    geom_smooth(method = "lm",
                color = "cyan",
                formula = y ~ poly(x, degree = 4),
                se = FALSE) +
    geom_smooth(method = "lm",
                color = "orange",
                formula = y ~ splines::ns(x, df = 3),
                se = FALSE)

3 Batch fitting of natural splines and nonlinear regression

#-----------------------------------------------------------------------
# Fit natural splines and nonlinear regression per group to estimate
# the EC50, EC75 and EC90.

# Partition the table into a list (one element per isolate).
tb_split <- tb %>%
    mutate(x = log10(dose + 1)) %>%
    group_split(isolate)
length(tb_split)
## [1] 60
# This function does all the dirty work using natural splines.
main_ns <- function(tb, plots = FALSE) {
    condition <- all(is.element(c("isolate", "growth", "x"),
                                names(tb)))
    if (!condition) {
        stop("Variables doesn't have the expected names")
    }
    if (plots) {
        plot(growth ~ x, data = tb)
        Sys.sleep(1)
    }
    # Fit the model using natural splines.
    m0 <- lm(growth ~ splines::ns(x, df = 3),
             data = tb)
    # Cell-means model.
    m_means <- lm(growth ~ factor(x),
                  data = tb)
    # Apply the lack-of-fit test.
    lof_test <- anova(m_means, m0)[2, "Pr(>F)"]
    if (lof_test < 0.01) {
        # Unique values of the independent variable.
        unique_x <- unique(tb$x)
        # If the test rejects, keep the best fit obtained by dropping one dose.
        fits <- vector(mode = "list", length = length(unique_x))
        fit_f <- numeric(length(unique_x))
        u <- 1
        for (j in unique_x) {
            m1 <- lm(growth ~ splines::ns(x, df = 3),
                     data = tb, subset = x != j)
            m_means <- update(m1, formula = . ~ factor(x))
            fit_f[u] <- anova(m_means, m1)[2, "F"]
            fits[[u]] <- m1
            u <- u + 1
        }
        m0 <- fits[[which.min(fit_f)]]
    }
    # Prediction at concentration 0.
    y0 <- predict(m0, newdata = list(x = 0))
    fracs <- c(0.5, 0.75, 0.9)
    y0_fracs <- y0 * fracs
    if (plots) {
        plot(growth ~ x, data = tb[rownames(m0$model), ])
        curve(predict(m0, newdata = list(x = x)),
              add = TRUE,
              col = "red")
        points(x = 0, y = y0, col = "red", pch = 19)
        abline(h = y0_fracs, lty = 2, col = "red")
        Sys.sleep(1)
    }
    # Determine the ECs (50, 75, 90) by numerical root finding.
    ecs <-
        sapply(y0_fracs,
               FUN = function(y0_frac) {
                   xmax <- max(tb[rownames(m0$model), "x"])
                   root <- try({
                       uniroot(interval = c(0, 1),
                               check.conv = FALSE,
                               extendInt = "downX",
                               f = function(x) {
                                   predict(m0, newdata = list(x = x)) -
                                       y0_frac
                               })
                   })
                   # Return the EC on the log10(dose + 1) scale; it is
                   # back-transformed to the dose scale later.
                   ecs <- ifelse(!inherits(root, "try-error"),
                                 root$root,
                                 NA_real_)
                   return(ecs)
               })
    if (plots) {
        plot(growth ~ x,
             data = tb[rownames(m0$model), ])
        curve(predict(m0, newdata = list(x = x)),
              add = TRUE,
              col = "red")
        abline(h = y0_fracs, lty = 2, col = "red")
        abline(v = ecs, lty = 2, col = "red")
        Sys.sleep(1)
    }
    # Function return value.
    ret <- list(isolate = tb$isolate[1],
                model = m0,
                ecs = ecs,
                excluded = ifelse(exists("fit_f", inherits = FALSE),
                                  unique_x[which.min(fit_f)],
                                  NA_real_))
    return(ret)
}
# main_ns(tb_split[[1]], plots = TRUE)
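
# Sketch (not run): wrapping main_ns() in purrr::safely() keeps map() from
# aborting if an isolate raises an unexpected error; each element then
# carries a $result and an $error component.
# main_ns_safe <- purrr::safely(main_ns)
# res <- map(tb_split, main_ns_safe, plots = FALSE)
# sum(map_lgl(res, ~!is.null(.x$error)))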

# This function does all the dirty work using nonlinear regression.
main_nls <- function(tb, plots = FALSE) {
    condition <- all(is.element(c("isolate", "growth", "x"),
                                names(tb)))
    if (!condition) {
        stop("Variables doesn't have the expected names")
    }
    if (plots) {
        plot(growth ~ x, data = tb)
        Sys.sleep(1)
    }
    # Fit the model using a four-parameter logistic (self-starting SSfpl).
    m0 <- try(nls(growth ~ SSfpl(x, sup, inf, xmid, scal),
                  data = tb))
    # Cell-means model.
    m_means <- lm(growth ~ factor(x),
                  data = tb)
    # Apply the lack-of-fit test.
    if (class(m0) != "try-error") {
        lof_test <- anova(m0, m_means)[2, "Pr(>F)"]
    }
    if (class(m0) == "try-error" || lof_test < 0.01) {
        # Unique values of the independent variable.
        unique_x <- unique(tb$x)
        # If the test rejects (or the fit failed), keep the best fit obtained
        # by dropping one dose.
        fits <- vector(mode = "list", length = length(unique_x))
        fit_f <- numeric(length(unique_x))
        u <- 1
        for (j in unique_x) {
            m1 <- try(nls(growth ~ SSfpl(x, sup, inf, xmid, scal),
                          data = tb,
                          subset = x != j))
            if (class(m1) == "try-error") {
                fit_f[u] <- NA_real_
                fits[u] <- list(NULL)  # keep the slot so indices stay aligned
            } else {
                m_means <- lm(growth ~ factor(x),
                              data = tb,
                              subset = x != j)
                fit_f[u] <- anova(m1, m_means)[2, "F value"]
                fits[[u]] <- m1
            }
            u <- u + 1
        }
        wm <- which.min(fit_f)
        if (length(wm)) {
            m0 <- fits[[wm]]
        }
    }
    if (class(m0) == "try-error") {
        warning("Nenhum ajuste foi conseguido.")
        ret <- list(isolate = tb$isolate[1],
                    model = NULL,
                    ecs = rep(NA_real_, 3),
                    excluded = NA_real_)
        return(ret)
    }
    # Prediction at concentration 0.
    y0 <- predict(m0, newdata = list(x = 0))
    fracs <- c(0.5, 0.75, 0.9)
    y0_fracs <- y0 * fracs
    if (plots) {
        plot(growth ~ x, data = eval(m0$data))
        curve(predict(m0, newdata = list(x = x)),
              add = TRUE,
              col = "red")
        points(x = 0, y = y0, col = "red", pch = 19)
        abline(h = y0_fracs, lty = 2, col = "red")
        Sys.sleep(1)
    }
    # Determine the ECs (50, 75, 90) by numerical root finding.
    ecs <-
        sapply(y0_fracs,
               FUN = function(y0_frac) {
                   # xmax <- max(tb[rownames(m0$model), "x"])
                   root <- try({
                       uniroot(interval = c(0, 1),
                               check.conv = FALSE,
                               extendInt = "downX",
                               f = function(x) {
                                   predict(m0, newdata = list(x = x)) -
                                       y0_frac
                               })
                   })
                   # Return the EC on the log10(dose + 1) scale; it is
                   # back-transformed to the dose scale later.
                   ecs <- ifelse(!inherits(root, "try-error"),
                                 root$root,
                                 NA_real_)
                   return(ecs)
               })
    if (plots) {
        plot(growth ~ x,
             data = eval(m0$data))
        curve(predict(m0, newdata = list(x = x)),
              add = TRUE,
              col = "red")
        abline(h = y0_fracs, lty = 2, col = "red")
        abline(v = ecs, lty = 2, col = "red")
        Sys.sleep(1)
    }
    # Function return value.
    ret <- list(isolate = tb$isolate[1],
                model = m0,
                ecs = ecs,
                excluded = ifelse(exists("fit_f", inherits = FALSE),
                                  unique_x[which.min(fit_f)],
                                  NA_real_))
    return(ret)
}
# main_nls(tb_split[[8]], plots = TRUE)
# Apply to all isolates using natural splines.
results_ns <- map(tb_split,
                  main_ns,
                  plots = FALSE)
## Error in uniroot(interval = c(0, 1), check.conv = FALSE, extendInt = "downX",  : 
##   no sign change found in 1000 iterations
## Error in uniroot(interval = c(0, 1), check.conv = FALSE, extendInt = "downX",  : 
##   no sign change found in 1000 iterations
## Error in uniroot(interval = c(0, 1), check.conv = FALSE, extendInt = "downX",  : 
##   no sign change found in 1000 iterations
length(results_ns)
## [1] 60
# Apply to all isolates using nonlinear regression.
results_nls <- map(tb_split,
                   main_nls,
                   plots = FALSE)
## Error in nls(y ~ cbind(1, 1/(1 + exp((xmid - x)/exp(lscal)))), data = xy,  : 
##   step factor 0.000488281 reduced below 'minFactor' of 0.000976562
## Error in nls(y ~ cbind(1, 1/(1 + exp((xmid - x)/exp(lscal)))), data = xy,  : 
##   singular gradient
## Error in nls(y ~ cbind(1, 1/(1 + exp((xmid - x)/exp(lscal)))), data = xy,  : 
##   number of iterations exceeded maximum of 50
## Error in qr.solve(QR.B, cc) : singular matrix 'a' in solve
## Error in uniroot(interval = c(0, 1), check.conv = FALSE, extendInt = "downX",  : 
##   no sign change found in 1000 iterations
## (the messages above, printed by the try() calls, repeat many times: the
## self-starting SSfpl fit or the root search failed for several isolates)
length(results_nls)
## [1] 60
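
# Sketch (not run): count how many isolates ended without a usable model or
# with at least one missing EC under each approach.
# sum(map_lgl(results_nls, ~is.null(.x$model)))
# sum(map_lgl(results_ns, ~any(is.na(.x$ecs))))
# sum(map_lgl(results_nls, ~any(is.na(.x$ecs))))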

4 Comparison of the species

# ATTENTION: some ECs were not estimated, either because the isolate is
# very resistant or because the model did not fit the data.
h <- c("ec50", "ec75", "ec90")
ecs <- map(results_ns, `[[`, "ecs")
ecs <- as.data.frame(invoke(rbind, ecs))
names(ecs) <- paste0(h, "_ns")
ecs_ns <- ecs

ecs <- map(results_nls, `[[`, "ecs")
ecs <- as.data.frame(invoke(rbind, ecs))
names(ecs) <- paste0(h, "_nls")
ecs_nls <- ecs

# Build the table with the ECs estimated by the two approaches.
i <- 0
tb_ecs <- map_dfr(tb_split,
                  function(tb) {
                      i <<- i + 1
                      cbind(tibble(isolate = tb$isolate[1],
                                   species = tb$species[1]),
                            ecs_ns[i, ],
                            ecs_nls[i, ])
                  })
head(tb_ecs)
##   isolate      species   ec50_ns   ec75_ns   ec90_ns  ec50_nls
## 1  col006   C. melonis 0.8627601 0.5943487 0.3671571 0.7900251
## 2  col009   C. melonis 1.2136140 0.7408141 0.4282606 1.1546540
## 3  col012 C. nymphaeae 1.7049602 1.4357172 1.2214591        NA
## 4  col014 C. nymphaeae 1.0808188 0.7841488 0.5814281 1.0469814
## 5  col020 C. nymphaeae 1.1243386 0.8341567 0.6746854        NA
## 6  col021   C. melonis 0.8162477 0.5998677 0.4116120 1.2049423
##    ec75_nls  ec90_nls
## 1 0.6002328 0.4723721
## 2 0.7124168 0.4481827
## 3        NA        NA
## 4 0.7721453 0.5468526
## 5        NA        NA
## 6 0.6025464 0.3861407
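
# Sketch (not run): the global counter above can be avoided because tb_split,
# ecs_ns and ecs_nls are aligned by position; bind_cols() builds an
# equivalent table.
# tb_ecs <- bind_cols(
#     map_dfr(tb_split, ~tibble(isolate = .x$isolate[1],
#                               species = .x$species[1])),
#     ecs_ns,
#     ecs_nls)
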
# Back-transform to the original dose scale (the fits used log10(dose + 1)).
tb_ecs <- tb_ecs %>%
    mutate_at(vars(starts_with("ec")), ~10^(.) - 1)
head(tb_ecs)
##   isolate      species   ec50_ns   ec75_ns   ec90_ns  ec50_nls ec75_nls
## 1  col006   C. melonis  6.290546  2.929604  1.328934  5.166306 2.983206
## 2  col009   C. melonis 15.353623  4.505720  1.680777 13.277561 4.157234
## 3  col012 C. nymphaeae 49.694427 26.272015 15.651720        NA       NA
## 4  col014 C. nymphaeae 11.045334  5.083435  2.814416 10.142468 4.917596
## 5  col020 C. nymphaeae 12.314921  5.825850  3.728086        NA       NA
## 6  col021   C. melonis  5.550096  2.979859  1.579954 15.030323 3.004483
##   ec90_nls
## 1 1.967373
## 2 1.806614
## 3       NA
## 4 2.522513
## 5       NA
## 6 1.432992
#-----------------------------------------------------------------------
# Test for equality of the probability distributions.

# Count the number of non-estimated (missing) ECs per species.
tb_ecs %>%
    group_by(species) %>%
    summarise_at(vars(starts_with("ec")), ~sum(is.na(.)))
## # A tibble: 2 x 7
##   species      ec50_ns ec75_ns ec90_ns ec50_nls ec75_nls ec90_nls
##   <fct>          <int>   <int>   <int>    <int>    <int>    <int>
## 1 C. melonis         0       0       0        1        1        1
## 2 C. nymphaeae       3       0       0       13       11       11
# Plot of the empirical cumulative distributions.
tb_ecs_long <- tb_ecs %>%
    gather(key = "ec", value = "value", starts_with("ec")) %>%
    separate(col = "ec",
             into = c("ec_val", "method"),
             remove = FALSE)

ggplot(data = tb_ecs_long,
       mapping = aes(x = value, color = species)) +
    facet_grid(facets = ec_val ~ method) +
    # facet_grid(facets = method ~ ec_val) +
    stat_ecdf() +
    theme(legend.position = c(0.975, 0.025),
          legend.justification = c(1, 0)) +
    labs(x = expression("Estimated concentration"),
         y = "Empirical cummulative density function",
         color = "Fungi\nspecies")

ggplot(data = tb_ecs_long,
       mapping = aes(x = value, color = species)) +
    facet_grid(facets = ec_val ~ method) +
    # facet_grid(facets = method ~ ec_val) +
    geom_density() +
    theme(legend.position = c(0.975, 0.975),
          legend.justification = c(1, 1)) +
    labs(x = expression("Estimated concentration"),
         y = "Empirical density function",
         color = "Fungi\nspecies")

ggplot(data = tb_ecs_long,
       mapping = aes(y = value, x = species)) +
    facet_grid(facets = method ~ ec_val) +
    geom_boxplot() +
    theme(legend.position = c(0.975, 0.025),
          legend.justification = c(1, 0)) +
    labs(x = "Species",
         y = "Empirical cummulative density function",
         color = "Fungi\nspecies")

# Apply the Kolmogorov-Smirnov test for the hypothesis of equal
# probability distributions.
tb_ecs_long %>%
    group_by(ec_val, method) %>%
    do({
        y_vals <- split(x = .$value, f = .$species)
        ks <- ks.test(x = y_vals[[1]], y = y_vals[[2]])
        data.frame(D = ks$statistic,
                   p_value = ks$p.value,
                   n1 = sum(is.finite(y_vals[[1]])),
                   n2 = sum(is.finite(y_vals[[2]])))
    })
## # A tibble: 6 x 6
## # Groups:   ec_val, method [6]
##   ec_val method     D   p_value    n1    n2
##   <chr>  <chr>  <dbl>     <dbl> <int> <int>
## 1 ec50   nls    0.262 0.376        29    17
## 2 ec50   ns     0.278 0.181        30    27
## 3 ec75   nls    0.407 0.0334       29    19
## 4 ec75   ns     0.433 0.00655      30    30
## 5 ec90   nls    0.481 0.00553      29    19
## 6 ec90   ns     0.567 0.0000874    30    30
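
# Sketch (not run): as a complement to the KS test above, a rank-based
# location test (Wilcoxon/Mann-Whitney) compares the species within each EC
# level and method; missing ECs are dropped by the default na.action.
# tb_ecs_long %>%
#     group_by(ec_val, method) %>%
#     do({
#         w <- wilcox.test(value ~ species, data = .)
#         data.frame(W = w$statistic, p_value = w$p.value)
#     })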

5 Comparison of the EC estimation approaches

#-----------------------------------------------------------------------
# Compare the EC estimation methods: ns vs. nls.

names(tb_ecs_long)
## [1] "isolate" "species" "ec"      "ec_val"  "method"  "value"
tb_ecs_method <- tb_ecs_long %>%
    select(-ec) %>%
    spread(key = "method", value = "value")
head(tb_ecs_method)
##   isolate    species ec_val       nls        ns
## 1  col006 C. melonis   ec50  5.166306  6.290546
## 2  col006 C. melonis   ec75  2.983206  2.929604
## 3  col006 C. melonis   ec90  1.967373  1.328934
## 4  col009 C. melonis   ec50 13.277561 15.353623
## 5  col009 C. melonis   ec75  4.157234  4.505720
## 6  col009 C. melonis   ec90  1.806614  1.680777
# ggplot(data = tb_ecs_method,
#        mapping = aes(x = ns, y = nls)) +
#     facet_wrap(facets = ~ec_val + species) +
#     geom_point() +
#     geom_abline(intercept = 0, slope = 1, color = "gray") +
#     coord_equal()

ggplot(data = tb_ecs_method,
       mapping = aes(x = ns, y = nls, color = ec_val)) +
    facet_wrap(facets = ~species) +
    scale_x_log10() +
    scale_y_log10() +
    geom_point() +
    geom_abline(intercept = 0, slope = 1, color = "gray") +
    coord_equal() +
    labs(x = "log10 das estimativas por regressão por natural splines",
         y = "log10 das estimativas por regressão não linear")

6 Plot of the fitted curves

#-----------------------------------------------------------------------
# Overlay the fitted curves on the observed data.

pred <- data.frame(x = seq(0, 2, length.out = 43))

pred_ns <-
    map(results_ns,
        function(res) {
            if (!is.null(res$model)) {
                cbind(pred,
                      fit = predict(res$model, newdata = pred),
                      isolate = res$isolate,
                      model = "ns")
            }
        }) %>%
    bind_rows()
head(pred_ns)
##                x      fit isolate model
## 1...1 0.00000000 32.51784  col006    ns
## 2...2 0.04761905 32.23804  col006    ns
## 3...3 0.09523810 31.94367  col006    ns
## 4...4 0.14285714 31.62015  col006    ns
## 5...5 0.19047619 31.25293  col006    ns
## 6...6 0.23809524 30.82742  col006    ns
pred_nls <-
    map(results_nls,
        function(res) {
            if (!is.null(res$model)) {
                cbind(pred,
                      fit = predict(res$model, newdata = pred),
                      isolate = res$isolate,
                      model = "nls")
            }
        }) %>%
    bind_rows()
head(pred_nls)
##            x      fit isolate model
## 1 0.00000000 31.80239  col006   nls
## 2 0.04761905 31.78031  col006   nls
## 3 0.09523810 31.74516  col006   nls
## 4 0.14285714 31.68931  col006   nls
## 5 0.19047619 31.60081  col006   nls
## 6 0.23809524 31.46119  col006   nls
# Observed data with the natural-splines (cyan) and nonlinear-regression
# (orange) fitted curves overlaid.
ggplot(data = tb,
       mapping = aes(x = log10(dose + 1), y = growth, color = species)) +
    facet_wrap(facets = ~isolate) +
    geom_jitter(width = 0.05) +
    geom_line(data = pred_ns,
              mapping = aes(y = fit, x = x),
              color = "cyan",
              inherit.aes = FALSE) +
    geom_line(data = pred_nls,
              mapping = aes(y = fit, x = x),
              color = "orange",
              inherit.aes = FALSE)

#-----------------------------------------------------------------------