Getting data – Programming Task Example (examples 3 and 4 in the note)

progTaskData <- read.table("http://www.cnachtsheim-text.csom.umn.edu/Kutner/Chapter%2014%20Data%20Sets/CH14TA01.txt", sep ="" , header = FALSE)
progTaskData
   V1 V2       V3
1  14  0 0.310262
2  29  0 0.835263
3   6  0 0.109996
4  25  1 0.726602
5  18  1 0.461837
6   4  0 0.082130
7  18  0 0.461837
8  12  0 0.245666
9  22  1 0.620812
10  6  0 0.109996
11 30  1 0.856299
12 11  0 0.216980
13 30  1 0.856299
14  5  0 0.095154
15 20  1 0.542404
16 13  0 0.276802
17  9  0 0.167100
18 32  1 0.891664
19 24  0 0.693379
20 13  1 0.276802
21 19  0 0.502134
22  4  0 0.082130
23 28  1 0.811825
24 22  1 0.620812
25  8  1 0.145815
progTaskData <- progTaskData[,1:2] # Drop the third column (the textbook's fitted values); we will compute them ourselves

# Look at the first 6 entries
head(progTaskData)
  V1 V2
1 14  0
2 29  0
3  6  0
4 25  1
5 18  1
6  4  0
names(progTaskData) <- c("X", "Y")
head(progTaskData)
   X Y
1 14 0
2 29 0
3  6 0
4 25 1
5 18 1
6  4 0

Scatter plot of the data and lowess fit

Example 3 part 1 and 2

library(ggplot2)
library(cowplot)

sp <- ggplot(progTaskData, aes(x = X, y = Y)) + geom_point(color = "steelblue") + theme_bw() 
sp

lf <- ggplot(progTaskData, aes(x = X, y = Y)) + geom_point(color = "steelblue") + theme_bw() +
  geom_smooth(method = "loess",  se = FALSE, color = "red") 

plot_grid(sp, lf)
## `geom_smooth()` using formula = 'y ~ x'

Scatter plot with Lowess and logistic mean response functions

Example 3 part 4

ggplot(progTaskData, aes(x = X, y = Y)) + geom_point(color = "steelblue") + theme_bw() +
  geom_smooth(method = "loess",  se = FALSE, color = "red") + 
  geom_smooth(method = "glm", method.args = list(family = "binomial"), se = FALSE, color = "steelblue")

Fit a logistic regression model

Example 3 part 5 and 6

Estimated Logistic Mean Response Function – Programming Task Example

?glm
logitfit <- glm(Y ~ X, family = binomial(link = "logit"), progTaskData)
summary(logitfit)

Call:
glm(formula = Y ~ X, family = binomial(link = "logit"), data = progTaskData)

Deviance Residuals: 
    Min       1Q   Median       3Q      Max  
-1.8992  -0.7509  -0.4140   0.7992   1.9624  

Coefficients:
            Estimate Std. Error z value Pr(>|z|)  
(Intercept) -3.05970    1.25935  -2.430   0.0151 *
X            0.16149    0.06498   2.485   0.0129 *
---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

(Dispersion parameter for binomial family taken to be 1)

    Null deviance: 34.296  on 24  degrees of freedom
Residual deviance: 25.425  on 23  degrees of freedom
AIC: 29.425

Number of Fisher Scoring iterations: 4
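
From the coefficients above, the estimated logistic mean response function is
\(\hat{\pi} = \exp(-3.0597 + 0.1615X)\,/\,[1 + \exp(-3.0597 + 0.1615X)]\).
A quick sketch of this as an R function (pi_hat is a hypothetical helper name, not part of the note):

b <- coef(logitfit)
pi_hat <- function(X) exp(b[1] + b[2] * X) / (1 + exp(b[1] + b[2] * X))
pi_hat(14)  # about 0.310, matching the fitted value for case 1 below
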
# Fitted values

progTaskData
    X Y
1  14 0
2  29 0
3   6 0
4  25 1
5  18 1
6   4 0
7  18 0
8  12 0
9  22 1
10  6 0
11 30 1
12 11 0
13 30 1
14  5 0
15 20 1
16 13 0
17  9 0
18 32 1
19 24 0
20 13 1
21 19 0
22  4 0
23 28 1
24 22 1
25  8 1
logitfit$fitted.values # This gives fitted values from the model for each case
         1          2          3          4          5          6          7 
0.31026237 0.83526292 0.10999616 0.72660237 0.46183704 0.08213002 0.46183704 
         8          9         10         11         12         13         14 
0.24566554 0.62081158 0.10999616 0.85629862 0.21698039 0.85629862 0.09515416 
        15         16         17         18         19         20         21 
0.54240353 0.27680234 0.16709980 0.89166416 0.69337941 0.27680234 0.50213414 
        22         23         24         25 
0.08213002 0.81182461 0.62081158 0.14581508 
cbind(progTaskData, 'fitted' = logitfit$fitted.values) # Table to make it easier to read
    X Y     fitted
1  14 0 0.31026237
2  29 0 0.83526292
3   6 0 0.10999616
4  25 1 0.72660237
5  18 1 0.46183704
6   4 0 0.08213002
7  18 0 0.46183704
8  12 0 0.24566554
9  22 1 0.62081158
10  6 0 0.10999616
11 30 1 0.85629862
12 11 0 0.21698039
13 30 1 0.85629862
14  5 0 0.09515416
15 20 1 0.54240353
16 13 0 0.27680234
17  9 0 0.16709980
18 32 1 0.89166416
19 24 0 0.69337941
20 13 1 0.27680234
21 19 0 0.50213414
22  4 0 0.08213002
23 28 1 0.81182461
24 22 1 0.62081158
25  8 1 0.14581508
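
The same fitted probabilities can be obtained with predict() and type = "response"; a quick sketch for a case with X = 14 months of experience:

predict(logitfit, newdata = data.frame(X = 14), type = "response")  # about 0.310
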

Odds ratio

Example 4 part 1

OR <- exp(coef(logitfit)[2]) # equivalently, exp(0.16149) using the X estimate from the summary(logitfit) output
OR
       X 
1.175256 
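
This odds ratio is for a one-unit (one-month) increase in X; for a c-unit increase the odds ratio is exp(c b1). A short sketch for c = 10, not part of the original output:

exp(10 * coef(logitfit)[2])  # odds ratio for a 10-month increase, about 5.03
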

Repeat Observations – Binomial Outcomes

CouponData <- read.table("http://www.cnachtsheim-text.csom.umn.edu/Kutner/Chapter%2014%20Data%20Sets/CH14TA02.txt", sep ="" , header = FALSE)
head(CouponData)
  V1  V2  V3    V4
1  5 200  30 0.150
2 10 200  55 0.275
3 15 200  70 0.350
4 20 200 100 0.500
5 30 200 137 0.685
CouponData <- CouponData[,1:3] # Drop the fourth column (the observed proportion Y/n); we only need X, n, and Y

# Look at the first 6 entries
head(CouponData)
  V1  V2  V3
1  5 200  30
2 10 200  55
3 15 200  70
4 20 200 100
5 30 200 137
names(CouponData) <- c("X", "n", "Y")
head(CouponData)
   X   n   Y
1  5 200  30
2 10 200  55
3 15 200  70
4 20 200 100
5 30 200 137

Example 5 part 1

y <- c(rep(1, sum(CouponData$Y)), rep(0, 1000 - sum(CouponData$Y))) # one row per customer: 392 ones (coupons redeemed), then 608 zeros
y
   [1] 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
  [38] 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
  [75] 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
 [112] 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
 [149] 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
 [186] 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
 [223] 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
 [260] 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
 [297] 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
 [334] 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
 [371] 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
 [408] 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
 [445] 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
 [482] 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
 [519] 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
 [556] 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
 [593] 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
 [630] 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
 [667] 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
 [704] 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
 [741] 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
 [778] 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
 [815] 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
 [852] 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
 [889] 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
 [926] 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
 [963] 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
[1000] 0
x <- c(rep(CouponData$X, CouponData$Y), rep(CouponData$X, 200 - CouponData$Y)) # matching price reductions: X repeated for the 1s, then for the 0s
x
   [1]  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5
  [25]  5  5  5  5  5  5 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10
  [49] 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10
  [73] 10 10 10 10 10 10 10 10 10 10 10 10 10 15 15 15 15 15 15 15 15 15 15 15
  [97] 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15
 [121] 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15
 [145] 15 15 15 15 15 15 15 15 15 15 15 20 20 20 20 20 20 20 20 20 20 20 20 20
 [169] 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20
 [193] 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20
 [217] 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20
 [241] 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 30 30 30 30 30 30 30 30 30
 [265] 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30
 [289] 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30
 [313] 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30
 [337] 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30
 [361] 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30
 [385] 30 30 30 30 30 30 30 30  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5
 [409]  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5
 [433]  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5
 [457]  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5
 [481]  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5
 [505]  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5
 [529]  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5  5
 [553]  5  5  5  5  5  5  5  5  5  5 10 10 10 10 10 10 10 10 10 10 10 10 10 10
 [577] 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10
 [601] 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10
 [625] 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10
 [649] 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10
 [673] 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10
 [697] 10 10 10 10 10 10 10 10 10 10 10 15 15 15 15 15 15 15 15 15 15 15 15 15
 [721] 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15
 [745] 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15
 [769] 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15
 [793] 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15
 [817] 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 15 20 20 20
 [841] 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20
 [865] 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20
 [889] 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20
 [913] 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20 20
 [937] 20 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30
 [961] 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30
 [985] 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30 30
newCouponData <- data.frame(y, x)
head(newCouponData)
  y x
1 1 5
2 1 5
3 1 5
4 1 5
5 1 5
6 1 5

Example 5 part 2

modelRep <- glm(y ~ x, family = binomial(link = 'logit'), data = newCouponData)
summary(modelRep)

Call:
glm(formula = y ~ x, family = binomial(link = "logit"), data = newCouponData)

Deviance Residuals: 
    Min       1Q   Median       3Q      Max  
-1.5578  -0.9385  -0.6176   1.2235   1.8713  

Coefficients:
             Estimate Std. Error z value Pr(>|z|)    
(Intercept) -2.044348   0.160977  -12.70   <2e-16 ***
x            0.096834   0.008549   11.33   <2e-16 ***
---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

(Dispersion parameter for binomial family taken to be 1)

    Null deviance: 1339.3  on 999  degrees of freedom
Residual deviance: 1192.0  on 998  degrees of freedom
AIC: 1196

Number of Fisher Scoring iterations: 4
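
As an aside, the 1,000-row expansion is not required: glm() also accepts grouped binomial data as a two-column (successes, failures) response and gives the same estimates. A minimal sketch, assuming CouponData as read above (modelGrouped is a hypothetical name):

modelGrouped <- glm(cbind(Y, n - Y) ~ X, family = binomial(link = "logit"),
                    data = CouponData)
coef(modelGrouped)  # identical coefficient estimates to modelRep
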

Example 5 part 4

library(dplyr)
newCouponData_p <- newCouponData %>%
  group_by(x) %>%
  summarize(p=mean(y))

newCouponData_p
# A tibble: 5 × 2
      x     p
  <int> <dbl>
1     5 0.15 
2    10 0.275
3    15 0.35 
4    20 0.5  
5    30 0.685
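
These observed proportions can be set side by side with the model's fitted probabilities at each level; a short sketch:

cbind(newCouponData_p,
      fitted = predict(modelRep, newdata = data.frame(x = newCouponData_p$x),
                       type = "response"))
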

Example 5 part 5

library(ggplot2)
ggplot(data = newCouponData_p, aes(x = x, y = p)) + geom_point()+
geom_smooth(method = "glm", method.args = list(family = "binomial"), se = FALSE, color = "steelblue") + theme_bw()

Example 5 part 6

OR <- exp(coef(modelRep)[2]) # you can also do exp(0.096834)
OR
       x 
1.101677 

Disease Outbreak Example (Portion of Model-Building Data Set).

Example 6 part 1

DiseaseData <- read.table("http://www.cnachtsheim-text.csom.umn.edu/Kutner/Chapter%2014%20Data%20Sets/CH14TA03.txt", sep ="" , header = FALSE)
head(DiseaseData)
  V1 V2 V3 V4 V5 V6
1  1 33  0  0  0  0
2  2 35  0  0  0  0
3  3  6  0  0  0  0
4  4 60  0  0  0  0
5  5 18  0  1  0  1
6  6 26  0  1  0  0
dim(DiseaseData)
[1] 98  6
DiseaseData <- DiseaseData[,-1] # Remove the 'case' column

# Look at the first 6 entries
head(DiseaseData)
  V2 V3 V4 V5 V6
1 33  0  0  0  0
2 35  0  0  0  0
3  6  0  0  0  0
4 60  0  0  0  0
5 18  0  1  0  1
6 26  0  1  0  0
# Rename columns
names(DiseaseData) <- c("x1", "x2", "x3", "x4", "y")
head(DiseaseData)
  x1 x2 x3 x4 y
1 33  0  0  0 0
2 35  0  0  0 0
3  6  0  0  0 0
4 60  0  0  0 0
5 18  0  1  0 1
6 26  0  1  0 0
# Fitting the logistic model
DiseaseModel <- glm(y ~ x1 + x2 + x3 + x4, data = DiseaseData, family = binomial(link = 'logit'))
summary(DiseaseModel)

Call:
glm(formula = y ~ x1 + x2 + x3 + x4, family = binomial(link = "logit"), 
    data = DiseaseData)

Deviance Residuals: 
    Min       1Q   Median       3Q      Max  
-1.6552  -0.7529  -0.4788   0.8558   2.0977  

Coefficients:
            Estimate Std. Error z value Pr(>|z|)    
(Intercept) -2.31293    0.64259  -3.599 0.000319 ***
x1           0.02975    0.01350   2.203 0.027577 *  
x2           0.40879    0.59900   0.682 0.494954    
x3          -0.30525    0.60413  -0.505 0.613362    
x4           1.57475    0.50162   3.139 0.001693 ** 
---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

(Dispersion parameter for binomial family taken to be 1)

    Null deviance: 122.32  on 97  degrees of freedom
Residual deviance: 101.05  on 93  degrees of freedom
AIC: 111.05

Number of Fisher Scoring iterations: 4

Example 6 part 3

# Odds ratio
OR1 <- exp(coef(DiseaseModel)) # Or find them individually, e.g., the odds ratio for x1 is exp(coef(DiseaseModel)[2])
OR1
(Intercept)          x1          x2          x3          x4 
 0.09897037  1.03019705  1.50499600  0.73693576  4.82953038 
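
Wald confidence intervals for these odds ratios follow by exponentiating confint.default(), just as done for the programming task model in Example 8 below; a sketch:

exp(confint.default(DiseaseModel))  # 95% CIs for the odds ratios
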

Example 6 part 5

# Fitted values
DiseaseModel$fitted.values # This gives fitted values from the model for each case
         1          2          3          4          5          6          7 
0.20896395 0.21896953 0.10579477 0.37099998 0.11079091 0.13649792 0.08019586 
         8          9         10         11         12         13         14 
0.27251659 0.24404261 0.30930059 0.16401090 0.16401090 0.18098588 0.11453992 
        15         16         17         18         19         20         21 
0.58966189 0.47909131 0.77817637 0.37749723 0.36362034 0.42753041 0.57330701 
        22         23         24         25         26         27         28 
0.65081138 0.43482688 0.49946392 0.34820497 0.48459375 0.55134495 0.07184490 
        29         30         31         32         33         34         35 
0.30929149 0.11678980 0.10506349 0.16295636 0.29673562 0.10029650 0.56600353 
        36         37         38         39         40         41         42 
0.32823272 0.44025060 0.29630836 0.54397514 0.26625832 0.77684245 0.41303463 
        43         44         45         46         47         48         49 
0.49395445 0.79319961 0.36182805 0.50690100 0.38973214 0.70895535 0.70278774 
        50         51         52         53         54         55         56 
0.69021148 0.84469844 0.81204013 0.42026533 0.51626099 0.54588734 0.44950178 
        57         58         59         60         61         62         63 
0.60397800 0.67903030 0.54588734 0.74584435 0.11079091 0.07802859 0.07184490 
        64         65         66         67         68         69         70 
0.10506349 0.29672673 0.12306855 0.16813085 0.19459386 0.28000626 0.18543766 
        71         72         73         74         75         76         77 
0.18098588 0.18098588 0.18543766 0.31736018 0.09187623 0.15114561 0.22275465 
        78         79         80         81         82         83         84 
0.21263033 0.09764368 0.20283919 0.23859602 0.08019586 0.33527254 0.24956481 
        85         86         87         88         89         90         91 
0.32215388 0.11155707 0.15893898 0.09959900 0.08019586 0.08019586 0.08241795 
        92         93         94         95         96         97         98 
0.07591505 0.08469594 0.16295636 0.15893329 0.11375589 0.09187623 0.17122984 
cbind(DiseaseData, 'fitted' = DiseaseModel$fitted.values) # Table to make it easier to read
   x1 x2 x3 x4 y     fitted
1  33  0  0  0 0 0.20896395
2  35  0  0  0 0 0.21896953
3   6  0  0  0 0 0.10579477
4  60  0  0  0 0 0.37099998
5  18  0  1  0 1 0.11079091
6  26  0  1  0 0 0.13649792
7   6  0  1  0 0 0.08019586
8  31  1  0  0 1 0.27251659
9  26  1  0  0 1 0.24404261
10 37  1  0  0 0 0.30930059
11 23  0  0  0 0 0.16401090
12 23  0  0  0 0 0.16401090
13 27  0  0  0 0 0.18098588
14  9  0  0  0 1 0.11453992
15 37  0  0  1 1 0.58966189
16 22  0  0  1 1 0.47909131
17 67  0  0  1 1 0.77817637
18  8  0  0  1 0 0.37749723
19  6  0  0  1 1 0.36362034
20 15  0  0  1 1 0.42753041
21 21  1  0  1 1 0.57330701
22 32  1  0  1 1 0.65081138
23 16  0  0  1 1 0.43482688
24 11  1  0  1 0 0.49946392
25 14  0  1  1 0 0.34820497
26  9  1  0  1 0 0.48459375
27 18  1  0  1 0 0.55134495
28  2  0  1  0 0 0.07184490
29 61  0  1  0 0 0.30929149
30 20  0  1  0 0 0.11678980
31 16  0  1  0 0 0.10506349
32  9  1  0  0 0 0.16295636
33 35  1  0  0 0 0.29673562
34  4  0  0  0 0 0.10029650
35 44  0  1  1 0 0.56600353
36 11  0  1  1 1 0.32823272
37  3  1  0  1 0 0.44025060
38  6  0  1  1 0 0.29630836
39 17  1  0  1 1 0.54397514
40  1  0  1  1 0 0.26625832
41 53  1  0  1 1 0.77684245
42 13  0  0  1 1 0.41303463
43 24  0  0  1 0 0.49395445
44 70  0  0  1 1 0.79319961
45 16  0  1  1 1 0.36182805
46 12  1  0  1 0 0.50690100
47 20  0  1  1 1 0.38973214
48 65  0  1  1 0 0.70895535
49 40  1  0  1 1 0.70278774
50 38  1  0  1 1 0.69021148
51 68  1  0  1 1 0.84469844
52 74  0  0  1 1 0.81204013
53 14  0  0  1 1 0.42026533
54 27  0  0  1 1 0.51626099
55 31  0  0  1 0 0.54588734
56 18  0  0  1 0 0.44950178
57 39  0  0  1 0 0.60397800
58 50  0  0  1 0 0.67903030
59 31  0  0  1 0 0.54588734
60 61  0  0  1 0 0.74584435
61 18  0  1  0 0 0.11079091
62  5  0  1  0 0 0.07802859
63  2  0  1  0 0 0.07184490
64 16  0  1  0 0 0.10506349
65 59  0  1  0 1 0.29672673
66 22  0  1  0 0 0.12306855
67 24  0  0  0 0 0.16813085
68 30  0  0  0 0 0.19459386
69 46  0  0  0 0 0.28000626
70 28  0  0  0 0 0.18543766
71 27  0  0  0 0 0.18098588
72 27  0  0  0 1 0.18098588
73 28  0  0  0 0 0.18543766
74 52  0  0  0 1 0.31736018
75 11  0  1  0 0 0.09187623
76  6  1  0  0 0 0.15114561
77 46  0  1  0 0 0.22275465
78 20  1  0  0 1 0.21263033
79  3  0  0  0 0 0.09764368
80 18  1  0  0 0 0.20283919
81 25  1  0  0 0 0.23859602
82  6  0  1  0 0 0.08019586
83 65  0  1  0 1 0.33527254
84 51  0  1  0 0 0.24956481
85 39  1  0  0 0 0.32215388
86  8  0  0  0 0 0.11155707
87  8  1  0  0 0 0.15893898
88 14  0  1  0 0 0.09959900
89  6  0  1  0 0 0.08019586
90  6  0  1  0 0 0.08019586
91  7  0  1  0 0 0.08241795
92  4  0  1  0 0 0.07591505
93  8  0  1  0 0 0.08469594
94  9  1  0  0 0 0.16295636
95 32  0  1  0 1 0.15893329
96 19  0  1  0 0 0.11375589
97 11  0  1  0 0 0.09187623
98 35  0  1  0 0 0.17122984

Test Concerning a Single \(\beta_k\): Wald Test

Example 7 in the lecture note

Consider the Programming Task Example again.

summary(logitfit)
## 
## Call:
## glm(formula = Y ~ X, family = binomial(link = "logit"), data = progTaskData)
## 
## Deviance Residuals: 
##     Min       1Q   Median       3Q      Max  
## -1.8992  -0.7509  -0.4140   0.7992   1.9624  
## 
## Coefficients:
##             Estimate Std. Error z value Pr(>|z|)  
## (Intercept) -3.05970    1.25935  -2.430   0.0151 *
## X            0.16149    0.06498   2.485   0.0129 *
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## (Dispersion parameter for binomial family taken to be 1)
## 
##     Null deviance: 34.296  on 24  degrees of freedom
## Residual deviance: 25.425  on 23  degrees of freedom
## AIC: 29.425
## 
## Number of Fisher Scoring iterations: 4
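
The z value and two-sided p-value reported for X are the Wald test \(z^* = b_1 / s\{b_1\}\) referred to the standard normal; recomputed by hand (a sketch):

b1  <- coef(summary(logitfit))["X", "Estimate"]
se1 <- coef(summary(logitfit))["X", "Std. Error"]
zstar <- b1 / se1        # 2.485, as in the summary
2 * pnorm(-abs(zstar))   # two-sided p-value, 0.0129
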

Example 8 in the lecture note

confint.default(logitfit, level = 0.95)
##                   2.5 %     97.5 %
## (Intercept) -5.52797622 -0.5914155
## X            0.03412744  0.2888444
#exp(logitfit$coefficients)         # exponentiated coefficients (odds ratio)
exp(confint.default(logitfit))     # 95% CI for exponentiated coefficients (odds ratio)
##                   2.5 %    97.5 %
## (Intercept) 0.003974024 0.5535432
## X           1.034716464 1.3348840

Example 9 - Likelihood Ratio test

library(lmtest)
## Loading required package: zoo
## 
## Attaching package: 'zoo'
## The following objects are masked from 'package:base':
## 
##     as.Date, as.Date.numeric
DiseaseModelFull <- glm(y ~ x1 + x2 + x3 + x4, data = DiseaseData, family = binomial(link = 'logit')) # We got this model earlier

DiseaseModelReduced <- glm(y ~ x2 + x3 + x4, data = DiseaseData, family = binomial(link = 'logit'))

lrtest(DiseaseModelFull, DiseaseModelReduced)
## Likelihood ratio test
## 
## Model 1: y ~ x1 + x2 + x3 + x4
## Model 2: y ~ x2 + x3 + x4
##   #Df  LogLik Df  Chisq Pr(>Chisq)  
## 1   5 -50.527                       
## 2   4 -53.102 -1 5.1495    0.02325 *
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
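
Equivalently, the likelihood ratio statistic is the drop in deviance between the reduced and full fits, referred to a chi-square distribution with 1 degree of freedom; a sketch:

G2 <- deviance(DiseaseModelReduced) - deviance(DiseaseModelFull)  # 5.1495
pchisq(G2, df = 1, lower.tail = FALSE)                            # 0.02325
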

Example 10 (Best subsets procedures)

library(olsrr)
## 
## Attaching package: 'olsrr'
## The following object is masked from 'package:datasets':
## 
##     rivers
# To obtain the set of plots on page 362 of the textbook. Note that
# ols_step_all_possible() works on lm objects, so the subset search below
# uses a linear-model approximation to the logistic fit.
DiseaseModelAll <- lm(y ~ x1 + x2 + x3 + x4, data = DiseaseData)

obj2 <- ols_step_all_possible(DiseaseModelAll)
obj2 # Default table
##    Index N  Predictors   R-Square Adj. R-Square Mallow's Cp
## 4      1 1          x4 0.15082116   0.141975543    5.743342
## 1      2 1          x1 0.07734939   0.067738448   14.373232
## 3      3 1          x3 0.03988469   0.029883489   18.773782
## 2      4 1          x2 0.01509887   0.004839487   21.685089
## 7      5 2       x1 x4 0.19923504   0.182376826    2.056717
## 10     6 2       x3 x4 0.16026346   0.142584800    6.634262
## 9      7 2       x2 x4 0.15679919   0.139047595    7.041171
## 6      8 2       x1 x3 0.10222141   0.083320811   13.451799
## 5      9 2       x1 x2 0.09425257   0.075184207   14.387808
## 8     10 2       x2 x3 0.04150020   0.021321252   20.584027
## 12    11 3    x1 x2 x4 0.20685596   0.181542851    3.161574
## 13    12 3    x1 x3 x4 0.20427374   0.178878222    3.464878
## 14    13 3    x2 x3 x4 0.16187897   0.135130425    8.444507
## 11    14 3    x1 x2 x3 0.10670908   0.078199798   14.924684
## 15    15 4 x1 x2 x3 x4 0.20823154   0.174176984    5.000000
plot(obj2) # Plots

# Custom tables (e.g., with AIC and SBC). Note that cbind() on mixed columns
# coerces everything to character, which is why the criteria print as strings below.
tabAIC_SBC <- cbind(obj2$predictors, obj2$aic, obj2$sbc)
tabAIC_SBC <- data.frame(tabAIC_SBC)
names(tabAIC_SBC) <- c("Predictors", "AIC", "SBC")
as.data.frame(tabAIC_SBC) 
##     Predictors              AIC              SBC
## 1           x4 118.027374117762 125.782276553774
## 2           x1 126.159493167441 133.914395603453
## 3           x3 130.060164455294 137.815066891306
## 4           x2 132.557975264164 140.312877700176
## 5        x1 x4 114.274544720797 124.614414635479
## 6        x3 x4 118.931575245362 129.271445160045
## 7        x2 x4 119.335035486157  129.67490540084
## 8        x1 x3 125.481432738266 135.821302652948
## 9        x1 x2 126.347459894853 136.687329809535
## 10       x2 x3 131.895129204697 142.234999119379
## 11    x1 x2 x4 115.337406952059 128.262244345412
## 12    x1 x3 x4  115.65594469589 128.580782089243
## 13    x2 x3 x4 120.742858899407  133.66769629276
## 14    x1 x2 x3 126.990337752539 139.915175145892
## 15 x1 x2 x3 x4 117.167293965701 132.677098837725
# Find the best four models under each criterion (read them off the top of each sorted table)

# Sort by AIC
tabAIC_SBC[order(tabAIC_SBC$AIC),]
##     Predictors              AIC              SBC
## 5        x1 x4 114.274544720797 124.614414635479
## 11    x1 x2 x4 115.337406952059 128.262244345412
## 12    x1 x3 x4  115.65594469589 128.580782089243
## 15 x1 x2 x3 x4 117.167293965701 132.677098837725
## 1           x4 118.027374117762 125.782276553774
## 6        x3 x4 118.931575245362 129.271445160045
## 7        x2 x4 119.335035486157  129.67490540084
## 13    x2 x3 x4 120.742858899407  133.66769629276
## 8        x1 x3 125.481432738266 135.821302652948
## 2           x1 126.159493167441 133.914395603453
## 9        x1 x2 126.347459894853 136.687329809535
## 14    x1 x2 x3 126.990337752539 139.915175145892
## 3           x3 130.060164455294 137.815066891306
## 10       x2 x3 131.895129204697 142.234999119379
## 4           x2 132.557975264164 140.312877700176
# Sort by SBC
tabAIC_SBC[order(tabAIC_SBC$SBC),]
##     Predictors              AIC              SBC
## 5        x1 x4 114.274544720797 124.614414635479
## 1           x4 118.027374117762 125.782276553774
## 11    x1 x2 x4 115.337406952059 128.262244345412
## 12    x1 x3 x4  115.65594469589 128.580782089243
## 6        x3 x4 118.931575245362 129.271445160045
## 7        x2 x4 119.335035486157  129.67490540084
## 15 x1 x2 x3 x4 117.167293965701 132.677098837725
## 13    x2 x3 x4 120.742858899407  133.66769629276
## 2           x1 126.159493167441 133.914395603453
## 8        x1 x3 125.481432738266 135.821302652948
## 9        x1 x2 126.347459894853 136.687329809535
## 3           x3 130.060164455294 137.815066891306
## 14    x1 x2 x3 126.990337752539 139.915175145892
## 4           x2 132.557975264164 140.312877700176
## 10       x2 x3 131.895129204697 142.234999119379
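
Since ols_step_all_possible() scored linear-model fits, the AIC/SBC values above are not those of logistic fits. The criteria can be checked on the logistic model itself for any candidate subset, e.g. the top-ranked x1 + x4 (a sketch; fitBest is a hypothetical name):

fitBest <- glm(y ~ x1 + x4, data = DiseaseData, family = binomial(link = "logit"))
AIC(fitBest)  # AIC of the logistic fit
BIC(fitBest)  # SBC (BIC) of the logistic fit
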

Pearson Chi-Square Goodness of Fit Test

This section reuses the coupon data and fitted model from Example 5: the grouped data CouponData, its long (Bernoulli) form newCouponData, and the logistic fit modelRep, all exactly as created there.

obs_exp <- data.frame(fits = modelRep$fitted.values, xlevels = newCouponData$x, yvals = newCouponData$y) # fitted probability, X level, and observed outcome per case
head(obs_exp)
       fits xlevels yvals
1 0.1736208       5     1
2 0.1736208       5     1
3 0.1736208       5     1
4 0.1736208       5     1
5 0.1736208       5     1
6 0.1736208       5     1
# Every case at a given X level shares the same fitted probability
unique(obs_exp[, c("xlevels", "fits")])
    xlevels      fits
1         5 0.1736208
31       10 0.2542615
86       15 0.3562119
156      20 0.4731071
256      30 0.7027987
953  0.7027987      30     0
954  0.7027987      30     0
955  0.7027987      30     0
956  0.7027987      30     0
957  0.7027987      30     0
958  0.7027987      30     0
959  0.7027987      30     0
960  0.7027987      30     0
961  0.7027987      30     0
962  0.7027987      30     0
963  0.7027987      30     0
964  0.7027987      30     0
965  0.7027987      30     0
966  0.7027987      30     0
967  0.7027987      30     0
968  0.7027987      30     0
969  0.7027987      30     0
970  0.7027987      30     0
971  0.7027987      30     0
972  0.7027987      30     0
973  0.7027987      30     0
974  0.7027987      30     0
975  0.7027987      30     0
976  0.7027987      30     0
977  0.7027987      30     0
978  0.7027987      30     0
979  0.7027987      30     0
980  0.7027987      30     0
981  0.7027987      30     0
982  0.7027987      30     0
983  0.7027987      30     0
984  0.7027987      30     0
985  0.7027987      30     0
986  0.7027987      30     0
987  0.7027987      30     0
988  0.7027987      30     0
989  0.7027987      30     0
990  0.7027987      30     0
991  0.7027987      30     0
992  0.7027987      30     0
993  0.7027987      30     0
994  0.7027987      30     0
995  0.7027987      30     0
996  0.7027987      30     0
997  0.7027987      30     0
998  0.7027987      30     0
999  0.7027987      30     0
1000 0.7027987      30     0
# For y = 1
obs_exp_group_cou_red <- obs_exp %>%
  group_by(xlevels) %>%
  summarize(ni = n(), Pihat = mean(fits), Obs_cou_red = sum(yvals), Exp_cou_red = ni * Pihat)

obs_exp_group_cou_red
# A tibble: 5 × 5
  xlevels    ni Pihat Obs_cou_red Exp_cou_red
    <int> <int> <dbl>       <dbl>       <dbl>
1       5   200 0.174          30        34.7
2      10   200 0.254          55        50.9
3      15   200 0.356          70        71.2
4      20   200 0.473         100        94.6
5      30   200 0.703         137       141. 
# For y = 0
obs_exp_group_cou_not_red <- obs_exp %>%
  group_by(xlevels) %>%
  summarize(ni = n(), Pihat = mean(fits), Obs_cou_not_red = ni - sum(yvals), Exp_cou_not_red = ni * (1 - Pihat))

obs_exp_group_cou_not_red
# A tibble: 5 × 5
  xlevels    ni Pihat Obs_cou_not_red Exp_cou_not_red
    <int> <int> <dbl>           <dbl>           <dbl>
1       5   200 0.174             170           165. 
2      10   200 0.254             145           149. 
3      15   200 0.356             130           129. 
4      20   200 0.473             100           105. 
5      30   200 0.703              63            59.4
chiSq1 <- (obs_exp_group_cou_red$Obs_cou_red - obs_exp_group_cou_red$Exp_cou_red)^2 /
  obs_exp_group_cou_red$Exp_cou_red
chiSq1
[1] 0.64271462 0.33830232 0.02166583 0.30573577 0.09015184
chiSq2 <- (obs_exp_group_cou_not_red$Obs_cou_not_red - obs_exp_group_cou_not_red$Exp_cou_not_red)^2 /
  obs_exp_group_cou_not_red$Exp_cou_not_red
chiSq2
[1] 0.13503322 0.11534505 0.01198784 0.27452591 0.21318408
ChiSquareTestStat <- sum(chiSq1) + sum(chiSq2)
ChiSquareTestStat
[1] 2.148646
df <- 5 - 2 # df = number of classes - number of estimated parameters in the model
p_value <- 1 - pchisq(ChiSquareTestStat, df)
p_value
[1] 0.5421341

The large p-value (about 0.54) gives no evidence against the adequacy of the fitted logistic response function.
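The two group-wise summaries above can also be collapsed into a single pipeline. A minimal sketch, assuming obs_exp has the columns fits, xlevels, and yvals used above:

library(dplyr)

# For each X level, compare observed and expected counts for both
# y = 1 and y = 0, then sum the Pearson chi-square contributions
gof <- obs_exp %>%
  group_by(xlevels) %>%
  summarize(ni = n(), Pihat = mean(fits), Obs1 = sum(yvals), .groups = "drop") %>%
  mutate(Exp1 = ni * Pihat,
         Obs0 = ni - Obs1,
         Exp0 = ni * (1 - Pihat),
         chi  = (Obs1 - Exp1)^2 / Exp1 + (Obs0 - Exp0)^2 / Exp0)

X2 <- sum(gof$chi) # same statistic as sum(chiSq1) + sum(chiSq2)
1 - pchisq(X2, df = nrow(gof) - 2)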

Prediction of a New Observation

DiseaseData <- read.table("http://www.cnachtsheim-text.csom.umn.edu/Kutner/Chapter%2014%20Data%20Sets/CH14TA03.txt", sep ="" , header = FALSE)
head(DiseaseData)
  V1 V2 V3 V4 V5 V6
1  1 33  0  0  0  0
2  2 35  0  0  0  0
3  3  6  0  0  0  0
4  4 60  0  0  0  0
5  5 18  0  1  0  1
6  6 26  0  1  0  0
dim(DiseaseData)
[1] 98  6
DiseaseData <- DiseaseData[,-1] # Remove the 'case' column

#Look at the first 6 entries
head(DiseaseData)
  V2 V3 V4 V5 V6
1 33  0  0  0  0
2 35  0  0  0  0
3  6  0  0  0  0
4 60  0  0  0  0
5 18  0  1  0  1
6 26  0  1  0  0
# Rename columns
names(DiseaseData) <- c( "x1", "x2" , "x3", "x4", "y")
head(DiseaseData)
  x1 x2 x3 x4 y
1 33  0  0  0 0
2 35  0  0  0 0
3  6  0  0  0 0
4 60  0  0  0 0
5 18  0  1  0 1
6 26  0  1  0 0
# Fitting the logistic model
DiseaseModel <- glm(y ~ x1 + x2 + x3 + x4, data = DiseaseData, family = binomial(link = 'logit'))
summary(DiseaseModel)

Call:
glm(formula = y ~ x1 + x2 + x3 + x4, family = binomial(link = "logit"), 
    data = DiseaseData)

Deviance Residuals: 
    Min       1Q   Median       3Q      Max  
-1.6552  -0.7529  -0.4788   0.8558   2.0977  

Coefficients:
            Estimate Std. Error z value Pr(>|z|)    
(Intercept) -2.31293    0.64259  -3.599 0.000319 ***
x1           0.02975    0.01350   2.203 0.027577 *  
x2           0.40879    0.59900   0.682 0.494954    
x3          -0.30525    0.60413  -0.505 0.613362    
x4           1.57475    0.50162   3.139 0.001693 ** 
---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

(Dispersion parameter for binomial family taken to be 1)

    Null deviance: 122.32  on 97  degrees of freedom
Residual deviance: 101.05  on 93  degrees of freedom
AIC: 111.05

Number of Fisher Scoring iterations: 4
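In keeping with this section's title, the fitted model can score a brand-new case directly via predict() with type = "response". A minimal sketch with hypothetical covariate values (the numbers below are made up for illustration, not taken from the text):

# Hypothetical new individual: x1 = 40, x2 = 0, x3 = 1, x4 = 1
newcase <- data.frame(x1 = 40, x2 = 0, x3 = 1, x4 = 1)
predict(DiseaseModel, newdata = newcase, type = "response") # estimated P(Y = 1)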
obs_Pi <- data.frame(Pi = DiseaseModel$fitted.values, disease_status = DiseaseModel$y)
obs_Pi # 98 rows; only the first few are shown here
           Pi disease_status
1  0.20896395              0
2  0.21896953              0
3  0.10579477              0
4  0.37099998              0
5  0.11079091              1
6  0.13649792              0
 ...          (rows 7-98 omitted)
# Classify each case as Yhat = 1 when its fitted probability is at least
# 0.316, the observed proportion of disease cases (31/98), else Yhat = 0.

# Zeros #################################
correct0 <- obs_Pi %>%
  filter(Pi < .316) %>%
  filter(disease_status == 0)

n_correct0 <- nrow(correct0)
n_correct0
[1] 49
incorrect0 <- obs_Pi %>%
  filter(Pi < .316) %>%
  filter(disease_status == 1)

n_incorrect0 <- nrow(incorrect0)
n_incorrect0
[1] 8
# Ones #################################
correct1 <- obs_Pi %>%
  filter(Pi >= .316) %>%
  filter(disease_status == 1)

n_correct1 <- nrow(correct1)
n_correct1
[1] 23
incorrect1 <- obs_Pi %>%
  filter(Pi >= .316) %>%
  filter(disease_status == 0)

n_incorrect1 <- nrow(incorrect1)
n_incorrect1
[1] 18
# Making a classification table  #################################
classifi_table <- matrix(c(n_correct0, n_incorrect0, n_incorrect1, n_correct1), nrow = 2) # matrix() fills column-wise, so the first column is Yhat = 0

rownames(classifi_table) <- c("Y = 0", "Y = 1")
colnames(classifi_table) <- c("Yhat = 0", "Yhat = 1")

as.data.frame(classifi_table)
      Yhat = 0 Yhat = 1
Y = 0       49       18
Y = 1        8       23
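Base R's table() yields the same 2 x 2 table in one call, and the off-diagonal counts give the overall error rate at this cutoff:

table(Observed = obs_Pi$disease_status, Predicted = as.numeric(obs_Pi$Pi >= .316))

# Proportion of the 98 cases misclassified at the 0.316 cutoff
(n_incorrect0 + n_incorrect1) / nrow(obs_Pi) # (8 + 18)/98, about 0.265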

Poisson Regression

Miller Lumber Company Example

LumberData <- read.table("http://www.cnachtsheim-text.csom.umn.edu/Kutner/Chapter%2014%20Data%20Sets/CH14TA14.txt", sep ="" , header = FALSE)
head(LumberData)
  V1  V2    V3 V4   V5   V6
1  9 606 41393  3 3.04 6.32
2  6 641 23635 18 1.95 8.89
3 28 505 55475 27 6.54 2.05
4 11 866 64646 31 1.67 5.81
5  4 599 31972  7 0.72 8.11
6  4 520 41755 23 2.24 6.81
dim(LumberData)
[1] 110   6
# Rename columns
names(LumberData) <- c( "y", "x1", "x2" , "x3", "x4", "x5")
head(LumberData)
   y  x1    x2 x3   x4   x5
1  9 606 41393  3 3.04 6.32
2  6 641 23635 18 1.95 8.89
3 28 505 55475 27 6.54 2.05
4 11 866 64646 31 1.67 5.81
5  4 599 31972  7 0.72 8.11
6  4 520 41755 23 2.24 6.81
# Fitting the Poisson regression model
options(scipen = 999) # Turn off scientific notation; restore the default with options(scipen = 0)

LumberModel <- glm(y ~ x1 + x2 + x3 + x4 + x5, data = LumberData, family = poisson(link = 'log'))
summary(LumberModel)

Call:
glm(formula = y ~ x1 + x2 + x3 + x4 + x5, family = poisson(link = "log"), 
    data = LumberData)

Deviance Residuals: 
     Min        1Q    Median        3Q       Max  
-2.93195  -0.58868  -0.00009   0.59269   2.23441  

Coefficients:
                Estimate   Std. Error z value             Pr(>|z|)    
(Intercept)  2.942437968  0.207247707  14.198 < 0.0000000000000002 ***
x1           0.000605767  0.000142121   4.262  0.00002023116145084 ***
x2          -0.000011686  0.000002112  -5.534  0.00000003129249634 ***
x3          -0.003726472  0.001781917  -2.091               0.0365 *  
x4           0.168382986  0.025768998   6.534  0.00000000006389741 ***
x5          -0.128773790  0.016201737  -7.948  0.00000000000000189 ***
---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

(Dispersion parameter for poisson family taken to be 1)

    Null deviance: 422.22  on 109  degrees of freedom
Residual deviance: 114.99  on 104  degrees of freedom
AIC: 571.02

Number of Fisher Scoring iterations: 4
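Because the model uses the log link, exponentiating a coefficient gives its multiplicative effect on the expected count. For instance, exp(0.1684) is roughly 1.18, so each one-unit increase in x4 increases the expected count by about 18%, holding the other predictors fixed:

exp(coef(LumberModel)) # rate ratios (multiplicative effects on the mean count)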