1 Resposta dicotômica

# Packages.
library(lattice)
library(latticeExtra)

# Load the UCI credit screening data.
url <- paste0("http://archive.ics.uci.edu/ml/machine-learning-databases",
              "/credit-screening/crx.data")
# `na.strings` spelled out in full (the original `na.string` relied on
# partial argument matching).  `stringsAsFactors = TRUE` restores the
# pre-R-4.0 default this script depends on: downstream code applies
# is.factor() and mosaicplot() to the character-coded columns.
cre <- read.csv(url, header = FALSE, na.strings = "?",
                stringsAsFactors = TRUE)
names(cre)[16] <- "y"

# Marginal summaries of every column; note the per-column NA counts.
summary(cre)
##     V1            V2              V3            V4         V5     
##  a   :210   Min.   :13.75   Min.   : 0.000   l   :  2   g   :519  
##  b   :468   1st Qu.:22.60   1st Qu.: 1.000   u   :519   gg  :  2  
##  NA's: 12   Median :28.46   Median : 2.750   y   :163   p   :163  
##             Mean   :31.57   Mean   : 4.759   NA's:  6   NA's:  6  
##             3rd Qu.:38.23   3rd Qu.: 7.207                        
##             Max.   :80.25   Max.   :28.000                        
##             NA's   :12                                            
##        V6            V7            V8         V9      V10    
##  c      :137   v      :399   Min.   : 0.000   f:329   f:395  
##  q      : 78   h      :138   1st Qu.: 0.165   t:361   t:295  
##  w      : 64   bb     : 59   Median : 1.000                  
##  i      : 59   ff     : 57   Mean   : 2.223                  
##  aa     : 54   j      :  8   3rd Qu.: 2.625                  
##  (Other):289   (Other): 20   Max.   :28.500                  
##  NA's   :  9   NA's   :  9                                   
##       V11       V12     V13          V14            V15           y      
##  Min.   : 0.0   f:374   g:625   Min.   :   0   Min.   :     0.0   -:383  
##  1st Qu.: 0.0   t:316   p:  8   1st Qu.:  75   1st Qu.:     0.0   +:307  
##  Median : 0.0           s: 57   Median : 160   Median :     5.0          
##  Mean   : 2.4                   Mean   : 184   Mean   :  1017.4          
##  3rd Qu.: 3.0                   3rd Qu.: 276   3rd Qu.:   395.5          
##  Max.   :67.0                   Max.   :2000   Max.   :100000.0          
##                                 NA's   :13
# Plot the response against the numeric predictors.
# vapply() (rather than sapply()) guarantees a logical vector back even
# for degenerate inputs.
n <- vapply(cre[, -16], is.numeric, logical(1))
f <- sprintf("y ~ %s",
             paste(names(cre)[1:15][n],
                   collapse = " + "))
xyplot(as.formula(f),
       outer = TRUE,
       data = cre,
       as.table = TRUE,
       jitter.y = TRUE,
       amount = 0.025,
       scales = list(x = list(relation = "free", log = FALSE))) +
    latticeExtra::layer(panel.smoother(x, y, method = lm))

# Same panels, now overlaying a per-panel logistic fit.
xyplot(as.formula(f),
       outer = TRUE,
       data = na.omit(cre),
       as.table = TRUE,
       jitter.y = TRUE,
       amount = 0.025,
       scales = list(x = list(relation = "free", log = FALSE))) +
    latticeExtra::layer({
        # `x` and `y` are supplied by the panel's evaluation environment.
        mod <- glm(y ~ x, family = binomial)
        xp <- seq(min(x), max(x), length.out = 101)
        yp <- predict(mod, newdata = list(x = xp), type = "response")
        # +1 lifts the fitted probability (0..1) onto the 1..2 axis range
        # at which the two-level factor response is drawn.
        panel.lines(x = xp, y = yp + 1)
    })

# Plot the response against the categorical predictors.
v <- names(cre)[1:15][vapply(cre[, -16], is.factor, logical(1))]
length(v)
## [1] 9
# keep[i] flags predictors whose cross table with the response has no
# sparse cell (a rough screen against separation).
keep <- logical(length(v))
names(keep) <- v

par(mfrow = c(3, 3))
for (i in v) {
    xt <- xtabs(as.formula(sprintf("~y + %s", i)), data = cre)
    # Keep the predictor only if every response-by-level cell holds more
    # than 10% of the observations.
    keep[i] <- min(prop.table(xt)) > 0.1
    mosaicplot(xt, main = NULL)
}

layout(1)

# Keep only the variables without separation.
dropped <- names(keep)[!keep]
cre <- subset(cre, select = setdiff(names(cre), dropped))

# Complete cases.
cc <- complete.cases(cre)
table(cc)
## cc
## FALSE  TRUE 
##    36   654
# Drop the rows with missing values.
cre <- cre[cc, ]

# # Null model (intercept only), kept for reference.
# m0 <- glm(y ~ 1, data = cre, family = binomial)

# Fit the logistic regression with all remaining predictors.
m0 <- glm(y ~ ., data = cre, family = binomial)
summary(m0)
## 
## Call:
## glm(formula = y ~ ., family = binomial, data = cre)
## 
## Deviance Residuals: 
##     Min       1Q   Median       3Q      Max  
## -2.6989  -0.7016  -0.5552   0.7280   2.1010  
## 
## Coefficients:
##               Estimate Std. Error z value Pr(>|z|)    
## (Intercept) -1.6920085  0.3622721  -4.671 3.00e-06 ***
## V1b         -0.0494728  0.2141518  -0.231   0.8173    
## V2           0.0007079  0.0090987   0.078   0.9380    
## V3           0.0302197  0.0213279   1.417   0.1565    
## V8           0.2429321  0.0453001   5.363 8.20e-08 ***
## V10t         0.8537006  0.2654346   3.216   0.0013 ** 
## V11          0.2019482  0.0504121   4.006 6.18e-05 ***
## V12t        -0.0052904  0.2034309  -0.026   0.9793    
## V14         -0.0007156  0.0006189  -1.156   0.2476    
## V15          0.0005069  0.0001186   4.273 1.93e-05 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## (Dispersion parameter for binomial family taken to be 1)
## 
##     Null deviance: 900.75  on 653  degrees of freedom
## Residual deviance: 628.00  on 644  degrees of freedom
## AIC: 648
## 
## Number of Fisher Scoring iterations: 7
# Stepwise predictor selection; k = log(n) makes step() use the BIC
# penalty instead of the default AIC (k = 2).
m1 <- step(m0, k = log(nrow(cre)))
## Start:  AIC=692.83
## y ~ V1 + V2 + V3 + V8 + V10 + V11 + V12 + V14 + V15
## 
##        Df Deviance    AIC
## - V12   1   628.00 686.35
## - V2    1   628.01 686.35
## - V1    1   628.05 686.40
## - V14   1   629.41 687.76
## - V3    1   629.99 688.34
## <none>      628.00 692.83
## - V10   1   638.13 696.48
## - V11   1   650.31 708.65
## - V8    1   662.47 720.82
## - V15   1   667.49 725.84
## 
## Step:  AIC=686.35
## y ~ V1 + V2 + V3 + V8 + V10 + V11 + V14 + V15
## 
##        Df Deviance    AIC
## - V2    1   628.01 679.87
## - V1    1   628.05 679.92
## - V14   1   629.44 681.30
## - V3    1   629.99 681.86
## <none>      628.00 686.35
## - V10   1   638.14 690.00
## - V11   1   650.34 702.20
## - V8    1   663.72 715.58
## - V15   1   667.50 719.36
## 
## Step:  AIC=679.87
## y ~ V1 + V3 + V8 + V10 + V11 + V14 + V15
## 
##        Df Deviance    AIC
## - V1    1   628.06 673.44
## - V14   1   629.45 674.83
## - V3    1   630.03 675.41
## <none>      628.01 679.87
## - V10   1   638.17 683.55
## - V11   1   650.68 696.06
## - V15   1   667.53 712.91
## - V8    1   667.57 712.95
## 
## Step:  AIC=673.44
## y ~ V3 + V8 + V10 + V11 + V14 + V15
## 
##        Df Deviance    AIC
## - V14   1   629.56 668.46
## - V3    1   630.09 668.99
## <none>      628.06 673.44
## - V10   1   638.35 677.25
## - V11   1   650.78 689.68
## - V15   1   667.53 706.43
## - V8    1   667.65 706.55
## 
## Step:  AIC=668.46
## y ~ V3 + V8 + V10 + V11 + V15
## 
##        Df Deviance    AIC
## - V3    1   632.19 664.60
## <none>      629.56 668.46
## - V10   1   639.44 671.86
## - V11   1   653.33 685.75
## - V8    1   668.29 700.70
## - V15   1   668.35 700.77
## 
## Step:  AIC=664.6
## y ~ V8 + V10 + V11 + V15
## 
##        Df Deviance    AIC
## <none>      632.19 664.60
## - V10   1   642.23 668.16
## - V11   1   657.78 683.71
## - V15   1   672.36 698.29
## - V8    1   673.97 699.91
# Summary of the BIC-selected model (V8 + V10 + V11 + V15).
summary(m1)
## 
## Call:
## glm(formula = y ~ V8 + V10 + V11 + V15, family = binomial, data = cre)
## 
## Deviance Residuals: 
##     Min       1Q   Median       3Q      Max  
## -2.6244  -0.6798  -0.5783   0.7310   1.9389  
## 
## Coefficients:
##               Estimate Std. Error z value Pr(>|z|)    
## (Intercept) -1.7140069  0.1572213 -10.902  < 2e-16 ***
## V8           0.2441045  0.0423819   5.760 8.43e-09 ***
## V10t         0.8394000  0.2618671   3.205  0.00135 ** 
## V11          0.2108932  0.0497906   4.236 2.28e-05 ***
## V15          0.0005055  0.0001186   4.263 2.02e-05 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## (Dispersion parameter for binomial family taken to be 1)
## 
##     Null deviance: 900.75  on 653  degrees of freedom
## Residual deviance: 632.19  on 649  degrees of freedom
## AIC: 642.19
## 
## Number of Fisher Scoring iterations: 7
# Fitted probabilities on the training data.
yp <- predict(m1, type = "response")

# Confusion matrix at the 0.5 cutoff.  An explicit threshold replaces
# round(), whose round-half-to-even rule made the cutoff accidental;
# results are identical for probabilities in (0, 1).
tb <- table(as.integer(yp > 0.5), cre$y)
tb
##    
##       -   +
##   0 310 102
##   1  48 194
# Overall accuracy (proportion of correct classifications).
sum(diag(tb))/sum(tb)
## [1] 0.7706422

1.1 Usando o pacote caret

library(caret)

# Build the train/test partition (75% train), stratified on the response.
set.seed(789)
intrain <- createDataPartition(y = cre$y,
                               p = 0.75,
                               list = FALSE)
cre_train <- cre[intrain, ]
cre_test <- cre[-intrain, ]
list(train = nrow(cre_train),
     test = nrow(cre_test),
     ratio = nrow(cre_train)/nrow(cre))

# Configure the cross-validation: 10-fold, repeated 3 times.
trctrl <- trainControl(method = "repeatedcv", number = 10, repeats = 3)

# Boosted Logistic Regression; the other method strings are alternative
# options left in place for experimentation.
set.seed(159)
fit <- train(y ~ .,
             data = cre_train,
             method = c("LogitBoost", "regLogistic", "plr")[1],
             trControl = trctrl)
fit

# The final fitted model object.
fit$finalModel

# Predict on the held-out set and show the confusion matrix.
yp <- predict(fit, newdata = cre_test)
confusionMatrix(yp, cre_test$y)

2 Resposta politômica

2.1 Usando o VGAM

# Load the package.
library(VGAM)

#-----------------------------------------------------------------------
# Use a pair of predictors so the decision boundary can be visualized.

# Fit the multinomial logit model (the last class is the reference).
fit <- vglm(Species ~ Sepal.Length + Sepal.Width,
            family = multinomial,
            data = iris)
summary(fit)
## 
## Call:
## vglm(formula = Species ~ Sepal.Length + Sepal.Width, family = multinomial, 
##     data = iris)
## 
## 
## Pearson residuals:
##                       Min         1Q     Median        3Q    Max
## log(mu[,1]/mu[,3]) -1.662  3.817e-13  8.162e-13 2.549e-06 0.1236
## log(mu[,2]/mu[,3]) -3.589 -4.853e-01 -3.069e-05 5.168e-01 2.1874
## 
## Coefficients: 
##                Estimate Std. Error z value Pr(>|z|)    
## (Intercept):1   93.6092    80.4706   1.163 0.244719    
## (Intercept):2   13.0459     3.0980   4.211 2.54e-05 ***
## Sepal.Length:1 -33.4706    28.0742  -1.192 0.233175    
## Sepal.Length:2  -1.9024     0.5169  -3.680 0.000233 ***
## Sepal.Width:1   27.9328    25.9237   1.078 0.281257    
## Sepal.Width:2   -0.4045     0.8626  -0.469 0.639117    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Number of linear predictors:  2 
## 
## Names of linear predictors: log(mu[,1]/mu[,3]), log(mu[,2]/mu[,3])
## 
## Residual deviance: 110.4801 on 294 degrees of freedom
## 
## Log-likelihood: -55.24 on 294 degrees of freedom
## 
## Number of iterations: 17 
## 
## Reference group is level  3  of the response
# Regular grid over the two predictors for drawing the classification.
grid <- with(iris,
             expand.grid(
                 Sepal.Length = seq(min(Sepal.Length),
                                    max(Sepal.Length),
                                    length.out = 61),
                 Sepal.Width = seq(min(Sepal.Width),
                                   max(Sepal.Width),
                                   length.out = 61),
                 KEEP.OUT.ATTRS = FALSE))

prob <- predict(fit, newdata = grid, type = "response")
# Map each grid row to its most probable class.  Indexing into the full
# level set is robust: the original factor(pred, labels = levels(...))
# errors (or mislabels) whenever some class is never the argmax.
grid$pred <- factor(levels(iris$Species)[apply(prob, MARGIN = 1, FUN = which.max)],
                    levels = levels(iris$Species))

# Plot the grid classifications (crosses) and the observed points
# (filled circles); the color change marks the decision boundary.
# (The original comment mentioned "support vectors" — an SVM-example
# leftover; there are none here.)
plot(Sepal.Width ~ Sepal.Length,
     data = grid,
     col = as.integer(grid$pred),
     pch = 3)
points(Sepal.Width ~ Sepal.Length,
       data = iris,
       col = as.integer(iris$Species),
       pch = 19)

#-----------------------------------------------------------------------
# Now using all predictors.

# Fit the model.
fit <- vglm(Species ~ ., family = multinomial, data = iris)

# Show the fit summary.
summary(fit)
## 
## Call:
## vglm(formula = Species ~ ., family = multinomial, data = iris)
## 
## 
## Pearson residuals:
##                           Min         1Q     Median        3Q       Max
## log(mu[,1]/mu[,3]) -0.0003362  7.294e-10  2.102e-09 9.960e-07 0.0003164
## log(mu[,2]/mu[,3]) -1.9700374 -3.420e-04 -4.358e-06 4.635e-04 2.5601905
## 
## Coefficients: 
##                 Estimate Std. Error z value Pr(>|z|)  
## (Intercept):1     35.361  25704.949   0.001   0.9989  
## (Intercept):2     42.638     25.708   1.659   0.0972 .
## Sepal.Length:1     9.637   7631.535   0.001   0.9990  
## Sepal.Length:2     2.465      2.394   1.030   0.3032  
## Sepal.Width:1     12.359   3557.648   0.003   0.9972  
## Sepal.Width:2      6.681      4.480   1.491   0.1359  
## Petal.Length:1   -23.214   5435.364  -0.004   0.9966  
## Petal.Length:2    -9.429      4.737  -1.990   0.0465 *
## Petal.Width:1    -34.102   8576.875  -0.004   0.9968  
## Petal.Width:2    -18.286      9.743  -1.877   0.0605 .
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Number of linear predictors:  2 
## 
## Names of linear predictors: log(mu[,1]/mu[,3]), log(mu[,2]/mu[,3])
## 
## Residual deviance: 11.8985 on 290 degrees of freedom
## 
## Log-likelihood: -5.9493 on 290 degrees of freedom
## 
## Number of iterations: 20 
## 
## Reference group is level  3  of the response
# In-sample predictions.
prob <- predict(fit, newdata = iris, type = "response")
# Robust argmax-to-level mapping: the original
# factor(pred, labels = levels(...)) breaks if some class is never
# predicted, so index the level set directly instead.
pred <- factor(levels(iris$Species)[apply(prob, MARGIN = 1, FUN = which.max)],
               levels = levels(iris$Species))

# Accuracy / confusion matrix.
caret::confusionMatrix(pred, iris$Species)
## Confusion Matrix and Statistics
## 
##             Reference
## Prediction   setosa versicolor virginica
##   setosa         50          0         0
##   versicolor      0         49         1
##   virginica       0          1        49
## 
## Overall Statistics
##                                           
##                Accuracy : 0.9867          
##                  95% CI : (0.9527, 0.9984)
##     No Information Rate : 0.3333          
##     P-Value [Acc > NIR] : < 2.2e-16       
##                                           
##                   Kappa : 0.98            
##  Mcnemar's Test P-Value : NA              
## 
## Statistics by Class:
## 
##                      Class: setosa Class: versicolor Class: virginica
## Sensitivity                 1.0000            0.9800           0.9800
## Specificity                 1.0000            0.9900           0.9900
## Pos Pred Value              1.0000            0.9800           0.9800
## Neg Pred Value              1.0000            0.9900           0.9900
## Prevalence                  0.3333            0.3333           0.3333
## Detection Rate              0.3333            0.3267           0.3267
## Detection Prevalence        0.3333            0.3333           0.3333
## Balanced Accuracy           1.0000            0.9850           0.9850

2.2 Usando o caret

library(caret)

# Build the train/test partition (75% train), stratified on Species.
set.seed(987)
intrain <- createDataPartition(y = iris$Species,
                               p = 0.75,
                               list = FALSE)
data_train <- iris[intrain, ]
data_test <- iris[-intrain, ]

# Configure the cross-validation: 10-fold, repeated 3 times.
trctrl <- trainControl(method = "repeatedcv",
                       number = 10,
                       repeats = 3)

# Penalized Multinomial Regression; wraps nnet::multinom().
set.seed(159)
fit <- train(Species ~ .,
             data = data_train,
             method = "multinom",
             trControl = trctrl)
## # weights:  18 (10 variable)
## initial  value 114.255678 
## iter  10 value 10.058950
## iter  20 value 3.313919
## iter  30 value 2.753694
## iter  40 value 2.344815
## iter  50 value 2.059893
## iter  60 value 1.078211
## iter  70 value 0.085099
## iter  80 value 0.052390
## iter  90 value 0.047081
## iter 100 value 0.044963
## final  value 0.044963 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 114.255678 
## iter  10 value 22.120228
## iter  20 value 20.421492
## iter  30 value 20.413323
## final  value 20.413322 
## converged
## # weights:  18 (10 variable)
## initial  value 114.255678 
## iter  10 value 10.087207
## iter  20 value 3.469245
## iter  30 value 3.008806
## iter  40 value 2.668432
## iter  50 value 2.547171
## iter  60 value 2.462195
## iter  70 value 2.396726
## iter  80 value 2.215847
## iter  90 value 2.170056
## iter 100 value 2.138382
## final  value 2.138382 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 10.608284
## iter  20 value 3.365568
## iter  30 value 2.725861
## iter  40 value 2.621696
## iter  50 value 2.497843
## iter  60 value 2.458804
## iter  70 value 2.334704
## iter  80 value 2.318875
## iter  90 value 2.299643
## iter 100 value 2.290231
## final  value 2.290231 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 21.694604
## iter  20 value 20.249971
## iter  30 value 20.245199
## final  value 20.245198 
## converged
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 10.629290
## iter  20 value 3.558949
## iter  30 value 3.097638
## iter  40 value 2.994255
## iter  50 value 2.896003
## iter  60 value 2.871370
## iter  70 value 2.840173
## iter  80 value 2.835462
## iter  90 value 2.835159
## iter 100 value 2.833610
## final  value 2.833610 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 10.853687
## iter  20 value 2.721020
## iter  30 value 1.879755
## iter  40 value 1.707002
## iter  50 value 1.508190
## iter  60 value 1.436305
## iter  70 value 1.310874
## iter  80 value 1.208203
## iter  90 value 1.043079
## iter 100 value 0.991956
## final  value 0.991956 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 21.585455
## iter  20 value 19.933990
## iter  30 value 19.927415
## final  value 19.927413 
## converged
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 10.874822
## iter  20 value 2.927391
## iter  30 value 2.311199
## iter  40 value 2.173429
## iter  50 value 2.046601
## iter  60 value 2.008326
## iter  70 value 1.975748
## iter  80 value 1.941134
## iter  90 value 1.937714
## iter 100 value 1.926236
## final  value 1.926236 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 11.337024
## iter  20 value 3.311259
## iter  30 value 2.684744
## iter  40 value 2.564240
## iter  50 value 2.355098
## iter  60 value 2.333758
## iter  70 value 2.295767
## iter  80 value 2.283864
## iter  90 value 2.233473
## iter 100 value 2.228374
## final  value 2.228374 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 22.178806
## iter  20 value 20.282813
## iter  30 value 20.276957
## final  value 20.276956 
## converged
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 11.358893
## iter  20 value 3.461489
## iter  30 value 2.971587
## iter  40 value 2.832324
## iter  50 value 2.788085
## iter  60 value 2.770211
## iter  70 value 2.758005
## iter  80 value 2.748028
## iter  90 value 2.745491
## iter 100 value 2.743057
## final  value 2.743057 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 11.526752
## iter  20 value 2.986642
## iter  30 value 2.550081
## iter  40 value 2.470967
## iter  50 value 2.419740
## iter  60 value 2.342808
## iter  70 value 2.237692
## iter  80 value 2.226777
## iter  90 value 2.208501
## iter 100 value 2.198917
## final  value 2.198917 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 22.476981
## iter  20 value 20.728722
## iter  30 value 20.722042
## final  value 20.722041 
## converged
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 11.548090
## iter  20 value 3.224451
## iter  30 value 2.947099
## iter  40 value 2.884974
## iter  50 value 2.830558
## iter  60 value 2.798373
## iter  70 value 2.772901
## iter  80 value 2.767892
## iter  90 value 2.765677
## iter 100 value 2.763441
## final  value 2.763441 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 10.687019
## iter  20 value 0.464187
## iter  30 value 0.106958
## iter  40 value 0.083360
## iter  50 value 0.063492
## iter  60 value 0.032194
## iter  70 value 0.024215
## iter  80 value 0.019023
## iter  90 value 0.016613
## iter 100 value 0.015365
## final  value 0.015365 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 21.366701
## iter  20 value 19.431403
## iter  30 value 19.422954
## final  value 19.422953 
## converged
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 10.707722
## iter  20 value 0.897126
## iter  30 value 0.749946
## iter  40 value 0.637899
## iter  50 value 0.590167
## iter  60 value 0.572858
## iter  70 value 0.566472
## iter  80 value 0.556389
## iter  90 value 0.550718
## iter 100 value 0.549579
## final  value 0.549579 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 114.255678 
## iter  10 value 16.896707
## iter  20 value 2.733832
## iter  30 value 2.236462
## iter  40 value 2.213469
## iter  50 value 2.173935
## iter  60 value 2.153011
## iter  70 value 2.144302
## iter  80 value 2.139756
## iter  90 value 2.129501
## iter 100 value 2.125471
## final  value 2.125471 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 114.255678 
## iter  10 value 23.736852
## iter  20 value 20.471540
## iter  30 value 20.465845
## final  value 20.465844 
## converged
## # weights:  18 (10 variable)
## initial  value 114.255678 
## iter  10 value 16.910517
## iter  20 value 3.087470
## iter  30 value 2.906916
## iter  40 value 2.856550
## iter  50 value 2.844116
## iter  60 value 2.841072
## iter  70 value 2.838936
## iter  80 value 2.837493
## iter  90 value 2.837154
## iter 100 value 2.836331
## final  value 2.836331 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 113.157066 
## iter  10 value 9.614492
## iter  20 value 2.139567
## iter  30 value 2.025601
## iter  40 value 1.894867
## iter  50 value 1.780075
## iter  60 value 1.727265
## iter  70 value 1.708027
## iter  80 value 1.664606
## iter  90 value 1.462858
## iter 100 value 1.415363
## final  value 1.415363 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 113.157066 
## iter  10 value 21.150066
## iter  20 value 19.624107
## iter  30 value 19.619922
## final  value 19.619921 
## converged
## # weights:  18 (10 variable)
## initial  value 113.157066 
## iter  10 value 9.640077
## iter  20 value 2.371402
## iter  30 value 2.309160
## iter  40 value 2.240246
## iter  50 value 2.218971
## iter  60 value 2.201167
## iter  70 value 2.179289
## iter  80 value 2.175295
## iter  90 value 2.171724
## iter 100 value 2.170403
## final  value 2.170403 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 11.368773
## iter  20 value 3.428562
## iter  30 value 2.768394
## iter  40 value 2.581730
## iter  50 value 2.419817
## iter  60 value 2.375082
## iter  70 value 2.355333
## iter  80 value 2.287915
## iter  90 value 2.249191
## iter 100 value 2.231214
## final  value 2.231214 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 22.135801
## iter  20 value 20.286577
## iter  30 value 20.279960
## final  value 20.279959 
## converged
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 11.389904
## iter  20 value 3.557514
## iter  30 value 3.068576
## iter  40 value 2.914015
## iter  50 value 2.878056
## iter  60 value 2.862775
## iter  70 value 2.828644
## iter  80 value 2.815578
## iter  90 value 2.814121
## iter 100 value 2.813286
## final  value 2.813286 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 113.157066 
## iter  10 value 11.886832
## iter  20 value 3.711261
## iter  30 value 2.947521
## iter  40 value 2.746559
## iter  50 value 2.474241
## iter  60 value 2.428065
## iter  70 value 2.349470
## iter  80 value 2.328919
## iter  90 value 2.249387
## iter 100 value 2.242101
## final  value 2.242101 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 113.157066 
## iter  10 value 22.103776
## iter  20 value 20.074399
## iter  30 value 20.068171
## final  value 20.068170 
## converged
## # weights:  18 (10 variable)
## initial  value 113.157066 
## iter  10 value 11.906517
## iter  20 value 3.832631
## iter  30 value 3.208825
## iter  40 value 3.025699
## iter  50 value 2.933498
## iter  60 value 2.900151
## iter  70 value 2.869737
## iter  80 value 2.835172
## iter  90 value 2.833778
## iter 100 value 2.831347
## final  value 2.831347 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 11.521549
## iter  20 value 2.382716
## iter  30 value 1.475890
## iter  40 value 1.256580
## iter  50 value 1.101588
## iter  60 value 1.029033
## iter  70 value 0.910071
## iter  80 value 0.863022
## iter  90 value 0.829202
## iter 100 value 0.796240
## final  value 0.796240 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 22.087535
## iter  20 value 20.202474
## iter  30 value 20.195091
## final  value 20.195089 
## converged
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 11.542806
## iter  20 value 2.674549
## iter  30 value 2.088542
## iter  40 value 1.979874
## iter  50 value 1.858244
## iter  60 value 1.819887
## iter  70 value 1.770612
## iter  80 value 1.762224
## iter  90 value 1.747811
## iter 100 value 1.743595
## final  value 1.743595 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 113.157066 
## iter  10 value 11.164916
## iter  20 value 3.642719
## iter  30 value 2.998992
## iter  40 value 2.740063
## iter  50 value 2.553786
## iter  60 value 2.495677
## iter  70 value 2.363403
## iter  80 value 2.345552
## iter  90 value 2.318312
## iter 100 value 2.303538
## final  value 2.303538 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 113.157066 
## iter  10 value 21.498559
## iter  20 value 19.426595
## iter  30 value 19.419110
## final  value 19.419109 
## converged
## # weights:  18 (10 variable)
## initial  value 113.157066 
## iter  10 value 11.184895
## iter  20 value 3.739934
## iter  30 value 3.206687
## iter  40 value 2.990811
## iter  50 value 2.922457
## iter  60 value 2.894806
## iter  70 value 2.855592
## iter  80 value 2.842779
## iter  90 value 2.830631
## iter 100 value 2.825832
## final  value 2.825832 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 113.157066 
## iter  10 value 10.653084
## iter  20 value 3.459082
## iter  30 value 2.886125
## iter  40 value 2.666738
## iter  50 value 2.457834
## iter  60 value 2.384016
## iter  70 value 2.339957
## iter  80 value 2.325396
## iter  90 value 2.127211
## iter 100 value 2.110871
## final  value 2.110871 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 113.157066 
## iter  10 value 21.906128
## iter  20 value 20.480904
## iter  30 value 20.476064
## final  value 20.476063 
## converged
## # weights:  18 (10 variable)
## initial  value 113.157066 
## iter  10 value 10.676310
## iter  20 value 3.609304
## iter  30 value 3.160464
## iter  40 value 2.951682
## iter  50 value 2.903116
## iter  60 value 2.884291
## iter  70 value 2.859536
## iter  80 value 2.847768
## iter  90 value 2.840694
## iter 100 value 2.836514
## final  value 2.836514 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 10.150188
## iter  20 value 0.170193
## iter  30 value 0.040198
## iter  40 value 0.021669
## iter  50 value 0.019203
## iter  60 value 0.014318
## iter  70 value 0.013841
## iter  80 value 0.011215
## iter  90 value 0.010691
## iter 100 value 0.009751
## final  value 0.009751 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 20.952140
## iter  20 value 18.688561
## iter  30 value 18.680147
## final  value 18.680146 
## converged
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 10.172148
## iter  20 value 0.566791
## iter  30 value 0.521028
## iter  40 value 0.482339
## iter  50 value 0.466406
## iter  60 value 0.458861
## iter  70 value 0.452270
## iter  80 value 0.444979
## iter  90 value 0.441979
## iter 100 value 0.438323
## final  value 0.438323 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 113.157066 
## iter  10 value 10.533456
## iter  20 value 3.320200
## iter  30 value 2.740985
## iter  40 value 2.605528
## iter  50 value 2.479773
## iter  60 value 2.412366
## iter  70 value 2.341897
## iter  80 value 2.322930
## iter  90 value 2.300208
## iter 100 value 2.285191
## final  value 2.285191 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 113.157066 
## iter  10 value 21.866147
## iter  20 value 20.879901
## iter  30 value 20.875694
## final  value 20.875693 
## converged
## # weights:  18 (10 variable)
## initial  value 113.157066 
## iter  10 value 10.555694
## iter  20 value 3.500634
## iter  30 value 3.088693
## iter  40 value 2.962627
## iter  50 value 2.890929
## iter  60 value 2.873477
## iter  70 value 2.866132
## iter  80 value 2.851656
## iter  90 value 2.845177
## iter 100 value 2.841772
## final  value 2.841772 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 10.853444
## iter  20 value 2.780161
## iter  30 value 2.445493
## iter  40 value 2.366060
## iter  50 value 2.313306
## iter  60 value 2.286798
## iter  70 value 2.219912
## iter  80 value 2.214296
## iter  90 value 2.183693
## iter 100 value 2.168964
## final  value 2.168964 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 21.549300
## iter  20 value 19.835653
## iter  30 value 19.828405
## final  value 19.828404 
## converged
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 10.874976
## iter  20 value 2.956613
## iter  30 value 2.698845
## iter  40 value 2.618609
## iter  50 value 2.605802
## iter  60 value 2.594572
## iter  70 value 2.590589
## iter  80 value 2.586455
## iter  90 value 2.585758
## iter 100 value 2.584765
## final  value 2.584765 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 113.157066 
## iter  10 value 14.923060
## iter  20 value 1.964673
## iter  30 value 1.636347
## iter  40 value 1.602874
## iter  50 value 1.577700
## iter  60 value 1.546491
## iter  70 value 1.397358
## iter  80 value 1.366959
## iter  90 value 1.357536
## iter 100 value 1.327677
## final  value 1.327677 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 113.157066 
## iter  10 value 23.067727
## iter  20 value 19.972618
## iter  30 value 19.970337
## iter  30 value 19.970337
## iter  30 value 19.970337
## final  value 19.970337 
## converged
## # weights:  18 (10 variable)
## initial  value 113.157066 
## iter  10 value 14.940953
## iter  20 value 2.371331
## iter  30 value 2.289167
## iter  40 value 2.279977
## iter  50 value 2.273071
## iter  60 value 2.271010
## iter  70 value 2.270465
## iter  80 value 2.269334
## iter  90 value 2.268255
## iter 100 value 2.267388
## final  value 2.267388 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 113.157066 
## iter  10 value 12.583378
## iter  20 value 3.898478
## iter  30 value 2.759048
## iter  40 value 2.630367
## iter  50 value 2.477005
## iter  60 value 2.246793
## iter  70 value 1.953055
## iter  80 value 1.934780
## iter  90 value 1.930418
## iter 100 value 1.929158
## final  value 1.929158 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 113.157066 
## iter  10 value 24.432169
## iter  20 value 20.813927
## final  value 20.812337 
## converged
## # weights:  18 (10 variable)
## initial  value 113.157066 
## iter  10 value 12.604731
## iter  20 value 4.003439
## iter  30 value 3.076894
## iter  40 value 2.979778
## iter  50 value 2.909899
## iter  60 value 2.884447
## iter  70 value 2.847221
## iter  80 value 2.836751
## iter  90 value 2.835513
## iter 100 value 2.835434
## final  value 2.835434 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 113.157066 
## iter  10 value 11.539835
## iter  20 value 3.228376
## iter  30 value 2.765767
## iter  40 value 2.547794
## iter  50 value 2.433012
## iter  60 value 2.350842
## iter  70 value 2.287578
## iter  80 value 2.274032
## iter  90 value 2.079071
## iter 100 value 2.074303
## final  value 2.074303 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 113.157066 
## iter  10 value 22.324207
## iter  20 value 20.342682
## iter  30 value 20.334969
## final  value 20.334967 
## converged
## # weights:  18 (10 variable)
## initial  value 113.157066 
## iter  10 value 11.561386
## iter  20 value 3.439069
## iter  30 value 3.107322
## iter  40 value 2.946091
## iter  50 value 2.904852
## iter  60 value 2.885528
## iter  70 value 2.853939
## iter  80 value 2.840727
## iter  90 value 2.835199
## iter 100 value 2.831364
## final  value 2.831364 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 11.597095
## iter  20 value 3.506591
## iter  30 value 2.854329
## iter  40 value 2.725586
## iter  50 value 2.528168
## iter  60 value 2.440575
## iter  70 value 2.340382
## iter  80 value 2.322020
## iter  90 value 2.286262
## iter 100 value 2.276409
## final  value 2.276409 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 22.502472
## iter  20 value 20.771672
## iter  30 value 20.764787
## final  value 20.764785 
## converged
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 11.618818
## iter  20 value 3.660464
## iter  30 value 3.150695
## iter  40 value 2.970631
## iter  50 value 2.914183
## iter  60 value 2.888638
## iter  70 value 2.874133
## iter  80 value 2.847959
## iter  90 value 2.844346
## iter 100 value 2.842027
## final  value 2.842027 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 8.416104
## iter  20 value 0.375657
## iter  30 value 0.093820
## iter  40 value 0.021593
## iter  50 value 0.013219
## iter  60 value 0.012122
## iter  70 value 0.008820
## iter  80 value 0.005902
## iter  90 value 0.004538
## iter 100 value 0.004168
## final  value 0.004168 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 19.616272
## iter  20 value 18.128422
## iter  30 value 18.121328
## final  value 18.121327 
## converged
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 8.437607
## iter  20 value 0.717124
## iter  30 value 0.568591
## iter  40 value 0.441653
## iter  50 value 0.432775
## iter  60 value 0.418673
## iter  70 value 0.414476
## iter  80 value 0.409278
## iter  90 value 0.405466
## iter 100 value 0.402052
## final  value 0.402052 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 11.776361
## iter  20 value 3.387383
## iter  30 value 2.733112
## iter  40 value 2.541947
## iter  50 value 2.359174
## iter  60 value 2.323200
## iter  70 value 2.309241
## iter  80 value 2.255203
## iter  90 value 2.196342
## iter 100 value 2.188389
## final  value 2.188389 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 22.545790
## iter  20 value 20.433209
## iter  30 value 20.426336
## final  value 20.426335 
## converged
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 11.798433
## iter  20 value 3.524797
## iter  30 value 3.000039
## iter  40 value 2.852180
## iter  50 value 2.800368
## iter  60 value 2.778511
## iter  70 value 2.764575
## iter  80 value 2.748001
## iter  90 value 2.744716
## iter 100 value 2.742835
## final  value 2.742835 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 114.255678 
## iter  10 value 9.826426
## iter  20 value 2.975276
## iter  30 value 2.464157
## iter  40 value 2.019901
## iter  50 value 1.852295
## iter  60 value 1.147724
## iter  70 value 0.350121
## iter  80 value 0.151463
## iter  90 value 0.056127
## iter 100 value 0.003360
## final  value 0.003360 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 114.255678 
## iter  10 value 21.940084
## iter  20 value 20.290368
## iter  30 value 20.282435
## final  value 20.282433 
## converged
## # weights:  18 (10 variable)
## initial  value 114.255678 
## iter  10 value 9.853953
## iter  20 value 3.190129
## iter  30 value 2.827557
## iter  40 value 2.552238
## iter  50 value 2.442922
## iter  60 value 2.332185
## iter  70 value 2.196599
## iter  80 value 2.163835
## iter  90 value 2.131875
## iter 100 value 2.127668
## final  value 2.127668 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 10.704293
## iter  20 value 3.738157
## iter  30 value 3.081312
## iter  40 value 2.811885
## iter  50 value 2.564089
## iter  60 value 2.508378
## iter  70 value 2.455351
## iter  80 value 2.395447
## iter  90 value 2.287156
## iter 100 value 2.276268
## final  value 2.276268 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 21.434528
## iter  20 value 19.706090
## iter  30 value 19.699379
## final  value 19.699378 
## converged
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 10.726163
## iter  20 value 3.864694
## iter  30 value 3.318783
## iter  40 value 3.062209
## iter  50 value 2.973490
## iter  60 value 2.937337
## iter  70 value 2.898941
## iter  80 value 2.867438
## iter  90 value 2.843001
## iter 100 value 2.839720
## final  value 2.839720 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 113.157066 
## iter  10 value 13.160938
## iter  20 value 2.461714
## iter  30 value 1.996681
## iter  40 value 1.983673
## iter  50 value 1.982675
## iter  60 value 1.967616
## iter  70 value 1.960090
## iter  80 value 1.954125
## iter  90 value 1.948286
## iter 100 value 1.944641
## final  value 1.944641 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 113.157066 
## iter  10 value 22.295387
## iter  20 value 20.134450
## final  value 20.133453 
## converged
## # weights:  18 (10 variable)
## initial  value 113.157066 
## iter  10 value 13.181480
## iter  20 value 2.989987
## iter  30 value 2.831725
## iter  40 value 2.805669
## iter  50 value 2.791145
## iter  60 value 2.756508
## iter  70 value 2.751284
## iter  80 value 2.748310
## iter  90 value 2.746018
## iter 100 value 2.743410
## final  value 2.743410 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 113.157066 
## iter  10 value 12.781897
## iter  20 value 3.485193
## iter  30 value 2.866932
## iter  40 value 2.595311
## iter  50 value 2.455839
## iter  60 value 2.417514
## iter  70 value 2.309864
## iter  80 value 2.294372
## iter  90 value 2.244235
## iter 100 value 2.234667
## final  value 2.234667 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 113.157066 
## iter  10 value 22.950130
## iter  20 value 20.960036
## iter  30 value 20.952764
## final  value 20.952763 
## converged
## # weights:  18 (10 variable)
## initial  value 113.157066 
## iter  10 value 12.800812
## iter  20 value 3.648241
## iter  30 value 3.159915
## iter  40 value 2.938728
## iter  50 value 2.904593
## iter  60 value 2.887298
## iter  70 value 2.861552
## iter  80 value 2.845514
## iter  90 value 2.840125
## iter 100 value 2.838223
## final  value 2.838223 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 11.710947
## iter  20 value 3.497026
## iter  30 value 2.703753
## iter  40 value 2.618162
## iter  50 value 2.522601
## iter  60 value 2.354803
## iter  70 value 2.281699
## iter  80 value 2.261934
## iter  90 value 2.095505
## iter 100 value 2.092118
## final  value 2.092118 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 22.411870
## iter  20 value 20.496966
## iter  30 value 20.490698
## final  value 20.490697 
## converged
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 11.732120
## iter  20 value 3.619062
## iter  30 value 3.023467
## iter  40 value 2.938373
## iter  50 value 2.875497
## iter  60 value 2.856747
## iter  70 value 2.842300
## iter  80 value 2.826151
## iter  90 value 2.823403
## iter 100 value 2.819824
## final  value 2.819824 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 10.902014
## iter  20 value 2.094202
## iter  30 value 1.315588
## iter  40 value 1.190280
## iter  50 value 1.156381
## iter  60 value 0.962627
## iter  70 value 0.862794
## iter  80 value 0.825826
## iter  90 value 0.773550
## iter 100 value 0.634255
## final  value 0.634255 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 21.926222
## iter  20 value 20.184752
## iter  30 value 20.178877
## final  value 20.178876 
## converged
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 10.924581
## iter  20 value 2.448728
## iter  30 value 2.043111
## iter  40 value 1.979604
## iter  50 value 1.884443
## iter  60 value 1.876729
## iter  70 value 1.841300
## iter  80 value 1.834826
## iter  90 value 1.833027
## iter 100 value 1.829532
## final  value 1.829532 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 114.255678 
## iter  10 value 11.837199
## iter  20 value 3.441463
## iter  30 value 2.873366
## iter  40 value 2.658225
## iter  50 value 2.452165
## iter  60 value 2.400712
## iter  70 value 2.308646
## iter  80 value 2.293888
## iter  90 value 2.163452
## iter 100 value 2.150172
## final  value 2.150172 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 114.255678 
## iter  10 value 22.285476
## iter  20 value 20.580793
## iter  30 value 20.573901
## final  value 20.573900 
## converged
## # weights:  18 (10 variable)
## initial  value 114.255678 
## iter  10 value 11.857175
## iter  20 value 3.603846
## iter  30 value 3.168904
## iter  40 value 2.957697
## iter  50 value 2.914638
## iter  60 value 2.892919
## iter  70 value 2.870918
## iter  80 value 2.857006
## iter  90 value 2.846988
## iter 100 value 2.844076
## final  value 2.844076 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 11.825292
## iter  20 value 3.692065
## iter  30 value 2.930938
## iter  40 value 2.662292
## iter  50 value 2.447062
## iter  60 value 2.400162
## iter  70 value 2.196491
## iter  80 value 2.178661
## iter  90 value 2.115359
## iter 100 value 2.109406
## final  value 2.109406 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 22.355794
## iter  20 value 20.675284
## iter  30 value 20.669042
## final  value 20.669040 
## converged
## # weights:  18 (10 variable)
## initial  value 112.058453 
## iter  10 value 11.846023
## iter  20 value 3.826636
## iter  30 value 3.200056
## iter  40 value 2.968267
## iter  50 value 2.913320
## iter  60 value 2.882034
## iter  70 value 2.835163
## iter  80 value 2.834883
## iter  90 value 2.834749
## iter 100 value 2.834688
## final  value 2.834688 
## stopped after 100 iterations
## # weights:  18 (10 variable)
## initial  value 125.241801 
## iter  10 value 23.853936
## iter  20 value 21.517212
## iter  30 value 21.505461
## final  value 21.505458 
## converged
# Print the caret training summary: resampling setup, per-decay
# accuracy/kappa, and the selected tuning value (decay = 0.1).
fit
## Penalized Multinomial Regression 
## 
## 114 samples
##   4 predictors
##   3 classes: 'setosa', 'versicolor', 'virginica' 
## 
## No pre-processing
## Resampling: Cross-Validated (10 fold, repeated 3 times) 
## Summary of sample sizes: 104, 102, 102, 102, 102, 102, ... 
## Resampling results across tuning parameters:
## 
##   decay  Accuracy   Kappa    
##   0e+00  0.9678283  0.9517730
##   1e-04  0.9622727  0.9434397
##   1e-01  0.9717172  0.9576132
## 
## Accuracy was used to select the optimal model using  the largest value.
## The final value used for the model was decay = 0.1.
# Inspect the final multinom model refit on the full training set with
# the chosen decay: coefficients per non-reference class, deviance, AIC.
fit$finalModel
## Call:
## nnet::multinom(formula = .outcome ~ ., data = dat, decay = param$decay)
## 
## Coefficients:
##            (Intercept) Sepal.Length Sepal.Width Petal.Length Petal.Width
## versicolor     1.32149   -0.2153398   -2.236748     2.530654  -0.3423041
## virginica     -2.62065   -2.5074644   -3.600806     5.503678   4.2504433
## 
## Residual Deviance: 43.01092 
## AIC: 63.01092
# Prediction and confusion matrix.
# Predict class labels for the held-out test set with the tuned model,
# then cross-tabulate predictions against the true species to get
# accuracy, kappa, and per-class sensitivity/specificity.
yp <- predict(fit, newdata = data_test)
confusionMatrix(yp, data_test$Species)
## Confusion Matrix and Statistics
## 
##             Reference
## Prediction   setosa versicolor virginica
##   setosa         12          0         0
##   versicolor      0         11         1
##   virginica       0          1        11
## 
## Overall Statistics
##                                           
##                Accuracy : 0.9444          
##                  95% CI : (0.8134, 0.9932)
##     No Information Rate : 0.3333          
##     P-Value [Acc > NIR] : 1.728e-14       
##                                           
##                   Kappa : 0.9167          
##  Mcnemar's Test P-Value : NA              
## 
## Statistics by Class:
## 
##                      Class: setosa Class: versicolor Class: virginica
## Sensitivity                 1.0000            0.9167           0.9167
## Specificity                 1.0000            0.9583           0.9583
## Pos Pred Value              1.0000            0.9167           0.9167
## Neg Pred Value              1.0000            0.9583           0.9583
## Prevalence                  0.3333            0.3333           0.3333
## Detection Rate              0.3333            0.3056           0.3056
## Detection Prevalence        0.3333            0.3333           0.3333
## Balanced Accuracy           1.0000            0.9375           0.9375