1. Importer les données
library(kernlab)
# Load the `spam` data set (4601 e-mails, 57 predictors plus the class `type`).
data(spam)
# Quick look at the last two numeric predictors and the response factor.
summary(spam[,56:58])
capitalLong capitalTotal type
Min. : 1.00 Min. : 1.0 nonspam:2788
1st Qu.: 6.00 1st Qu.: 35.0 spam :1813
Median : 15.00 Median : 95.0
Mean : 52.17 Mean : 283.3
3rd Qu.: 43.00 3rd Qu.: 266.0
Max. :9989.00 Max. :15841.0
2. L’algorithme des plus proches voisins
# Fix the RNG seed so the random row shuffle below is reproducible.
set.seed(1234)
# Shuffle the rows of `spam` so the train/validation split is random.
spam1 <- spam[sample(nrow(spam)),]
# Training set: first 3000 shuffled rows; validation set: the remaining rows.
app <- spam1[1:3000,]
valid <- spam1[-(1:3000),]
library(class)
# 3-nearest-neighbours prediction on the validation set.
# Column 58 is the response `type`, so it is dropped from the predictors.
reg3ppv <- knn(app[,-58],valid[,-58],cl=app$type,k=3)
# Misclassification (test error) rate on the validation set.
mean(reg3ppv!=valid$type)
[1] 0.1923798
3. Calibration des paramètres
# Candidate values for the tuning parameter k: 1, 2, ..., 100.
grille.K <- data.frame(k=seq(1,100,by=1))
library(caret)
# Single leave-group-out split: rows 1:3000 of spam1 train, the rest test
# (same split as the app/valid partition used above).
ctrl1 <- trainControl(method="LGOCV",number=1,index=list(1:3000))
# Tune k for k-NN by accuracy on the held-out rows.
sel.k1 <- train(type~.,data=spam1,method="knn",trControl=ctrl1,tuneGrid=grille.K)
sel.k1
k-Nearest Neighbors
4601 samples
57 predictor
2 classes: 'nonspam', 'spam'
No pre-processing
Resampling: Repeated Train/Test Splits Estimated (1 reps, 75%)
Summary of sample sizes: 3000
Resampling results across tuning parameters:
k Accuracy Kappa
1 0.7926296 0.5601299
2 0.7695191 0.5133254
3 0.8076202 0.5894339
4 0.7920050 0.5551572
5 0.8082448 0.5918892
6 0.7995003 0.5730181
7 0.7938788 0.5606447
8 0.7876327 0.5484334
9 0.7870081 0.5480684
10 0.7807620 0.5350978
11 0.7763898 0.5262578
12 0.7657714 0.5042128
13 0.7670206 0.5062584
14 0.7682698 0.5100951
15 0.7676452 0.5086259
16 0.7638976 0.5022077
17 0.7651468 0.5051403
18 0.7695191 0.5133254
19 0.7620237 0.4984084
20 0.7695191 0.5136195
21 0.7607745 0.4948607
22 0.7582761 0.4895851
23 0.7607745 0.4948607
24 0.7576515 0.4893469
25 0.7570269 0.4863261
26 0.7557776 0.4852433
27 0.7507808 0.4759763
28 0.7495315 0.4733496
29 0.7470331 0.4680963
30 0.7526546 0.4806981
31 0.7495315 0.4742993
32 0.7489069 0.4725127
33 0.7495315 0.4736666
34 0.7476577 0.4705254
35 0.7482823 0.4723116
36 0.7495315 0.4746150
37 0.7482823 0.4726282
38 0.7426608 0.4603650
39 0.7464085 0.4682238
40 0.7457839 0.4657912
41 0.7439101 0.4626620
42 0.7426608 0.4613354
43 0.7426608 0.4603650
44 0.7426608 0.4593911
45 0.7401624 0.4534850
46 0.7395378 0.4523362
47 0.7364147 0.4456025
48 0.7351655 0.4433104
49 0.7314179 0.4344113
50 0.7339163 0.4400103
51 0.7339163 0.4410209
52 0.7357901 0.4454578
53 0.7351655 0.4446475
54 0.7364147 0.4466025
55 0.7332917 0.4405502
56 0.7326671 0.4394083
57 0.7332917 0.4392027
58 0.7339163 0.4410209
59 0.7351655 0.4443138
60 0.7351655 0.4436453
61 0.7345409 0.4428359
62 0.7307933 0.4332669
63 0.7320425 0.4358967
64 0.7326671 0.4363622
65 0.7326671 0.4367023
66 0.7289194 0.4274236
67 0.7289194 0.4277698
68 0.7295440 0.4285701
69 0.7295440 0.4289157
70 0.7307933 0.4308652
71 0.7276702 0.4223393
72 0.7351655 0.4395998
73 0.7276702 0.4230402
74 0.7301686 0.4286800
75 0.7251718 0.4173935
76 0.7245472 0.4162466
77 0.7245472 0.4155372
78 0.7226733 0.4124538
79 0.7232979 0.4135992
80 0.7282948 0.4234891
81 0.7239225 0.4147453
82 0.7264210 0.4214466
83 0.7195503 0.4056570
84 0.7251718 0.4166853
85 0.7289194 0.4239393
86 0.7245472 0.4148261
87 0.7251718 0.4159754
88 0.7276702 0.4212847
89 0.7245472 0.4144699
90 0.7226733 0.4113824
91 0.7245472 0.4166006
92 0.7232979 0.4132431
93 0.7220487 0.4109518
94 0.7220487 0.4113090
95 0.7207995 0.4083044
96 0.7220487 0.4123782
97 0.7201749 0.4068007
98 0.7195503 0.4052960
99 0.7201749 0.4068007
100 0.7201749 0.4064403
Accuracy was used to select the optimal model using the largest value.
The final value used for the model was k = 5.
# Value of k selected by train() (largest accuracy over the grid).
sel.k1$bestTune
k
5 5
# Plot the tuning profile: accuracy as a function of k.
plot(sel.k1)
4. Compléments
# 10-fold cross-validation instead of a single train/test split.
ctrl2 <- trainControl(method="cv",number=10)
# Seed fixed so the CV fold assignment is reproducible.
set.seed(123)
sel.k2 <- train(type~.,data=spam1,method="knn",trControl=ctrl2,tuneGrid=grille.K)
sel.k2
k-Nearest Neighbors
4601 samples
57 predictor
2 classes: 'nonspam', 'spam'
No pre-processing
Resampling: Cross-Validated (10 fold)
Summary of sample sizes: 4140, 4141, 4142, 4141, 4140, 4141, ...
Resampling results across tuning parameters:
k Accuracy Kappa
1 0.8219977 0.6275191
2 0.7878747 0.5555259
3 0.8046163 0.5909410
4 0.7976588 0.5756348
5 0.8083077 0.5972499
6 0.7967854 0.5726804
7 0.7961318 0.5709071
8 0.7928667 0.5643528
9 0.7956947 0.5698995
10 0.7913516 0.5605425
11 0.7900435 0.5570775
12 0.7856919 0.5472803
13 0.7865567 0.5493465
14 0.7856942 0.5478743
15 0.7850382 0.5468740
16 0.7835189 0.5434216
17 0.7815623 0.5392667
18 0.7809092 0.5376879
19 0.7796077 0.5356960
20 0.7776498 0.5314279
21 0.7787386 0.5330840
22 0.7776498 0.5309019
23 0.7767835 0.5291132
24 0.7739546 0.5225627
25 0.7717792 0.5185176
26 0.7687386 0.5111582
27 0.7678775 0.5100120
28 0.7683057 0.5112264
29 0.7659130 0.5059844
30 0.7626516 0.4995937
31 0.7617825 0.4978908
32 0.7600439 0.4939730
33 0.7593964 0.4926892
34 0.7587390 0.4907674
35 0.7572230 0.4878106
36 0.7567853 0.4868840
37 0.7600425 0.4937430
38 0.7587362 0.4910000
39 0.7548283 0.4826306
40 0.7543945 0.4818373
41 0.7522168 0.4770132
42 0.7543940 0.4819442
43 0.7539583 0.4815775
44 0.7533042 0.4794285
45 0.7537395 0.4808282
46 0.7565642 0.4867105
47 0.7541757 0.4814870
48 0.7537343 0.4806773
49 0.7539569 0.4815175
50 0.7506998 0.4745809
51 0.7511294 0.4758656
52 0.7513487 0.4758600
53 0.7517830 0.4771916
54 0.7502626 0.4742710
55 0.7511289 0.4756932
56 0.7476563 0.4685760
57 0.7496095 0.4729324
58 0.7487390 0.4711897
59 0.7506984 0.4747852
60 0.7478775 0.4684415
61 0.7496109 0.4724651
62 0.7478728 0.4688660
63 0.7491766 0.4707844
64 0.7493954 0.4718338
65 0.7474394 0.4679162
66 0.7459139 0.4642980
67 0.7459120 0.4640560
68 0.7443940 0.4615036
69 0.7413467 0.4550304
70 0.7406946 0.4536061
71 0.7413449 0.4547164
72 0.7417829 0.4550048
73 0.7411251 0.4535518
74 0.7400410 0.4518814
75 0.7398231 0.4511605
76 0.7400400 0.4517228
77 0.7422144 0.4562877
78 0.7409101 0.4533506
79 0.7376511 0.4464004
80 0.7359115 0.4424012
81 0.7380849 0.4476624
82 0.7391714 0.4496107
83 0.7396076 0.4501970
84 0.7380830 0.4473681
85 0.7378652 0.4462485
86 0.7391676 0.4493235
87 0.7391672 0.4486874
88 0.7402565 0.4512176
89 0.7409058 0.4522343
90 0.7385145 0.4470966
91 0.7389465 0.4483787
92 0.7387319 0.4479428
93 0.7419890 0.4542480
94 0.7409020 0.4513562
95 0.7409039 0.4517932
96 0.7404710 0.4505429
97 0.7380807 0.4452603
98 0.7385145 0.4468839
99 0.7382976 0.4462437
100 0.7380816 0.4456849
Accuracy was used to select the optimal model using the largest value.
The final value used for the model was k = 1.
# 10-fold cross-validation repeated 2 times (20 fits per k value).
# NOTE(review): no set.seed() precedes this call, so the fold assignment —
# and hence the printed results — are not reproducible; also the name
# `ctrl3` is redefined again further down in the script.
ctrl3 <- trainControl(method="repeatedcv",number=10,repeats=2)
train(type~.,data=spam1,method="knn",trControl=ctrl3,tuneGrid=grille.K)
k-Nearest Neighbors
4601 samples
57 predictor
2 classes: 'nonspam', 'spam'
No pre-processing
Resampling: Cross-Validated (10 fold, repeated 2 times)
Summary of sample sizes: 4141, 4141, 4142, 4141, 4141, 4141, ...
Resampling results across tuning parameters:
k Accuracy Kappa
1 0.8243861 0.6320533
2 0.8037327 0.5891112
3 0.8064477 0.5939632
4 0.8004688 0.5810236
5 0.8069870 0.5937498
6 0.7992760 0.5773839
7 0.7991639 0.5771701
8 0.7979678 0.5752534
9 0.7941675 0.5662960
10 0.7940569 0.5655441
11 0.7912334 0.5600927
12 0.7892762 0.5555983
13 0.7868858 0.5502361
14 0.7865604 0.5496083
15 0.7875394 0.5516145
16 0.7818870 0.5392169
17 0.7808017 0.5381134
18 0.7810195 0.5381363
19 0.7791715 0.5344674
20 0.7781904 0.5327331
21 0.7775399 0.5317481
22 0.7787343 0.5337371
23 0.7750406 0.5259267
24 0.7751509 0.5258723
25 0.7724330 0.5206945
26 0.7703692 0.5161799
27 0.7711280 0.5177800
28 0.7684115 0.5117097
29 0.7701545 0.5155006
30 0.7662433 0.5074367
31 0.7641769 0.5023876
32 0.7635242 0.5014870
33 0.7614597 0.4964891
34 0.7612426 0.4962248
35 0.7616771 0.4967750
36 0.7604819 0.4939416
37 0.7597175 0.4927048
38 0.7581991 0.4898694
39 0.7570034 0.4871461
40 0.7546114 0.4820043
41 0.7525450 0.4774256
42 0.7542851 0.4813070
43 0.7522192 0.4769890
44 0.7528725 0.4780107
45 0.7530909 0.4783271
46 0.7529819 0.4783737
47 0.7535256 0.4795279
48 0.7542882 0.4814234
49 0.7547211 0.4826060
50 0.7536327 0.4798782
51 0.7513539 0.4753600
52 0.7510238 0.4751692
53 0.7513513 0.4756256
54 0.7524394 0.4781804
55 0.7504826 0.4738334
56 0.7517851 0.4765925
57 0.7503718 0.4737363
58 0.7515672 0.4761819
59 0.7499382 0.4728771
60 0.7484176 0.4693265
61 0.7483094 0.4692964
62 0.7489618 0.4701572
63 0.7478756 0.4683728
64 0.7453772 0.4632121
65 0.7460306 0.4645802
66 0.7464649 0.4651508
67 0.7473347 0.4669320
68 0.7448361 0.4618373
69 0.7434233 0.4584999
70 0.7444006 0.4609431
71 0.7442926 0.4604232
72 0.7422284 0.4558715
73 0.7421185 0.4555345
74 0.7415736 0.4543143
75 0.7429869 0.4573038
76 0.7414665 0.4540053
77 0.7388586 0.4490644
78 0.7385311 0.4485460
79 0.7379880 0.4477112
80 0.7391842 0.4499945
81 0.7389680 0.4493613
82 0.7387503 0.4488550
83 0.7378808 0.4472695
84 0.7388609 0.4489165
85 0.7390767 0.4497906
86 0.7386421 0.4484761
87 0.7407073 0.4528411
88 0.7398380 0.4510101
89 0.7409245 0.4528288
90 0.7402714 0.4509140
91 0.7404881 0.4516308
92 0.7398387 0.4502413
93 0.7379904 0.4464987
94 0.7373371 0.4454463
95 0.7374460 0.4456614
96 0.7362517 0.4431388
97 0.7358172 0.4419612
98 0.7377733 0.4461888
99 0.7372319 0.4448349
100 0.7373399 0.4447708
Accuracy was used to select the optimal model using the largest value.
The final value used for the model was k = 1.
set.seed(123)
# Time the sequential (single-core) run of the same 10-fold CV tuning,
# to compare with the parallel run below.
system.time(sel.k3 <- train(type~.,data=spam1,method="knn",trControl=ctrl2,tuneGrid=grille.K))
utilisateur système écoulé
190.42 1.92 202.01
library(doParallel)
# Create a 4-worker PSOCK cluster and register it so caret::train
# runs the resampling iterations in parallel.
cl <- makePSOCKcluster(4)
registerDoParallel(cl) ## the cluster is shut down at the end of the program
set.seed(123)
# Same tuning as sel.k3, now run in parallel — compare the elapsed times.
system.time(sel.k4 <- train(type~.,data=spam1,method="knn",trControl=ctrl2,tuneGrid=grille.K))
utilisateur système écoulé
1.40 0.12 108.85
# Same single LGOCV split as ctrl1, but requesting class probabilities and
# the two-class summary so ROC / sensitivity / specificity can be computed.
ctrl3 <- trainControl(method="LGOCV",number=1,index=list(1:3000),classProbs=TRUE,summary=twoClassSummary)
# Select k by maximising the area under the ROC curve instead of accuracy.
sel.k5 <- train(type~.,data=spam1,method="knn",trControl=ctrl3,metric="ROC",tuneGrid=grille.K)
sel.k5
k-Nearest Neighbors
4601 samples
57 predictor
2 classes: 'nonspam', 'spam'
No pre-processing
Resampling: Repeated Train/Test Splits Estimated (1 reps, 75%)
Summary of sample sizes: 3000
Resampling results across tuning parameters:
k ROC Sens Spec
1 0.7787106 0.8394309 0.7179903
2 0.8338225 0.8353659 0.7212318
3 0.8533736 0.8638211 0.7179903
4 0.8678845 0.8424797 0.6871961
5 0.8717494 0.8577236 0.7293355
6 0.8669753 0.8495935 0.7212318
7 0.8660571 0.8495935 0.7082658
8 0.8652656 0.8424797 0.7050243
9 0.8619854 0.8353659 0.7082658
10 0.8586534 0.8282520 0.7034036
11 0.8562552 0.8262195 0.6985413
12 0.8537961 0.8119919 0.6888169
13 0.8540547 0.8170732 0.6855754
14 0.8512744 0.8140244 0.6904376
15 0.8483639 0.8140244 0.6920583
16 0.8468231 0.8119919 0.7034036
17 0.8450722 0.8079268 0.6969206
18 0.8457558 0.8140244 0.6936791
19 0.8435511 0.8069106 0.6969206
20 0.8439505 0.8058943 0.6952998
21 0.8429878 0.8058943 0.6888169
22 0.8402577 0.8079268 0.6823339
23 0.8403129 0.8058943 0.6904376
24 0.8374313 0.8018293 0.6904376
25 0.8355347 0.8028455 0.6758509
26 0.8347910 0.7987805 0.6871961
27 0.8324423 0.7936992 0.6855754
28 0.8330344 0.7906504 0.6920583
29 0.8309492 0.7845528 0.6839546
30 0.8311707 0.7865854 0.6904376
31 0.8308586 0.7876016 0.6920583
32 0.8300968 0.7845528 0.6952998
33 0.8290476 0.7886179 0.6871961
34 0.8285238 0.7825203 0.6839546
35 0.8278411 0.7835366 0.6920583
36 0.8278312 0.7896341 0.6888169
37 0.8261026 0.7825203 0.6920583
38 0.8248598 0.7845528 0.6871961
39 0.8243113 0.7835366 0.6904376
40 0.8236426 0.7825203 0.6839546
41 0.8228067 0.7825203 0.6839546
42 0.8229163 0.7865854 0.6871961
43 0.8233676 0.7794715 0.6855754
44 0.8225177 0.7804878 0.6726094
45 0.8213812 0.7845528 0.6693679
46 0.8214849 0.7754065 0.6709887
47 0.8206227 0.7794715 0.6677472
48 0.8198510 0.7794715 0.6677472
49 0.8191288 0.7774390 0.6580227
50 0.8182426 0.7774390 0.6596434
51 0.8178144 0.7743902 0.6709887
52 0.8167009 0.7713415 0.6726094
53 0.8160050 0.7723577 0.6742301
54 0.8146470 0.7754065 0.6693679
55 0.8137370 0.7723577 0.6726094
56 0.8130518 0.7733740 0.6807131
57 0.8112779 0.7764228 0.6645057
58 0.8103398 0.7764228 0.6612642
59 0.8105680 0.7733740 0.6726094
60 0.8107533 0.7743902 0.6677472
61 0.8101051 0.7743902 0.6709887
62 0.8099075 0.7754065 0.6612642
63 0.8100434 0.7774390 0.6612642
64 0.8099338 0.7804878 0.6645057
65 0.8092363 0.7794715 0.6580227
66 0.8083575 0.7835366 0.6482982
67 0.8084415 0.7794715 0.6450567
68 0.8085000 0.7804878 0.6499190
69 0.8077745 0.7815041 0.6499190
70 0.8071000 0.7865854 0.6418152
71 0.8073536 0.7865854 0.6337115
72 0.8070209 0.7876016 0.6401945
73 0.8065441 0.7865854 0.6369530
74 0.8063802 0.7835366 0.6385737
75 0.8055254 0.7845528 0.6337115
76 0.8053722 0.7835366 0.6418152
77 0.8051959 0.7845528 0.6288493
78 0.8043197 0.7815041 0.6288493
79 0.8038083 0.7815041 0.6304700
80 0.8042711 0.7835366 0.6288493
81 0.8034624 0.7825203 0.6304700
82 0.8040627 0.7804878 0.6385737
83 0.8039499 0.7784553 0.6272285
84 0.8036608 0.7855691 0.6320908
85 0.8031140 0.7916667 0.6320908
86 0.8025136 0.7835366 0.6191248
87 0.8024807 0.7865854 0.6256078
88 0.8017354 0.7876016 0.6272285
89 0.8009629 0.7865854 0.6256078
90 0.8012437 0.7825203 0.6304700
91 0.8013170 0.7815041 0.6337115
92 0.8003568 0.7825203 0.6304700
93 0.7999565 0.7804878 0.6304700
94 0.8000389 0.7804878 0.6256078
95 0.7998709 0.7794715 0.6256078
96 0.8002275 0.7784553 0.6353323
97 0.8000776 0.7784553 0.6256078
98 0.7994706 0.7784553 0.6239870
99 0.7993669 0.7794715 0.6256078
100 0.7991305 0.7804878 0.6239870
ROC was used to select the optimal model using the largest value.
The final value used for the model was k = 5.
# Resampled performance of the retained model (best k): ROC, Sens, Spec.
getTrainPerf(sel.k5)
TrainROC TrainSens TrainSpec method
1 0.8717494 0.8577236 0.7293355 knn
Pour aller plus loin
# 10-fold CV repeated 20 times: a more stable estimate of the tuning
# criterion, at a much higher computational cost.
# NOTE(review): no seed is set before this call, so folds are not reproducible.
ctrl3 <- trainControl(method="repeatedcv",number=10,repeats=20)
train(type~.,data=spam1,method="knn",trControl=ctrl3,tuneGrid=grille.K)
k-Nearest Neighbors
4601 samples
57 predictor
2 classes: 'nonspam', 'spam'
No pre-processing
Resampling: Cross-Validated (10 fold, repeated 20 times)
Summary of sample sizes: 4141, 4141, 4140, 4141, 4142, 4141, ...
Resampling results across tuning parameters:
k Accuracy Kappa
1 0.8241797 0.6318173
2 0.7952963 0.5722752
3 0.8094002 0.6000138
4 0.8019318 0.5843697
5 0.8071812 0.5944573
6 0.7996715 0.5787314
7 0.7998889 0.5786355
8 0.7951728 0.5688041
9 0.7957269 0.5699263
10 0.7930105 0.5638193
11 0.7923910 0.5622948
12 0.7893062 0.5556860
13 0.7894691 0.5557289
14 0.7869158 0.5503616
15 0.7878928 0.5525382
16 0.7850247 0.5461913
17 0.7827545 0.5413428
18 0.7796884 0.5349357
19 0.7790589 0.5338392
20 0.7775268 0.5307422
21 0.7774064 0.5307079
22 0.7753848 0.5263808
23 0.7749395 0.5256765
24 0.7731250 0.5217522
25 0.7716582 0.5186919
26 0.7699403 0.5149718
27 0.7692989 0.5137239
28 0.7673644 0.5094756
29 0.7672130 0.5093858
30 0.7659528 0.5064414
31 0.7639312 0.5021658
32 0.7622578 0.4985035
33 0.7611492 0.4962405
34 0.7603451 0.4944455
35 0.7601275 0.4940502
36 0.7590192 0.4917185
37 0.7584763 0.4905903
38 0.7569663 0.4871296
39 0.7559008 0.4849494
40 0.7550966 0.4831209
41 0.7542919 0.4813333
42 0.7533684 0.4792529
43 0.7530425 0.4786637
44 0.7528358 0.4780289
45 0.7528243 0.4781668
46 0.7522162 0.4767968
47 0.7527596 0.4780641
48 0.7528896 0.4785885
49 0.7531835 0.4793624
50 0.7524336 0.4777787
51 0.7529766 0.4792190
52 0.7524760 0.4783404
53 0.7526607 0.4788904
54 0.7520307 0.4776816
55 0.7522264 0.4781470
56 0.7506836 0.4749291
57 0.7510204 0.4756776
58 0.7518129 0.4771601
59 0.7514548 0.4763380
60 0.7504226 0.4740550
61 0.7498458 0.4727917
62 0.7494004 0.4718060
63 0.7488462 0.4706376
64 0.7483030 0.4693270
65 0.7481510 0.4689385
66 0.7474556 0.4674519
67 0.7472058 0.4669103
68 0.7466734 0.4657497
69 0.7456190 0.4634119
70 0.7451950 0.4624383
71 0.7443154 0.4605991
72 0.7436197 0.4590708
73 0.7427612 0.4574055
74 0.7422292 0.4561342
75 0.7410661 0.4538279
76 0.7401423 0.4517283
77 0.7394252 0.4502460
78 0.7395451 0.4505279
79 0.7388061 0.4489697
80 0.7388930 0.4490716
81 0.7384474 0.4481420
82 0.7389035 0.4489749
83 0.7387188 0.4485264
84 0.7388281 0.4486484
85 0.7388491 0.4487291
86 0.7386003 0.4482225
87 0.7386649 0.4482063
88 0.7390345 0.4488975
89 0.7390558 0.4488102
90 0.7394144 0.4496569
91 0.7392299 0.4491917
92 0.7392952 0.4492554
93 0.7388391 0.4483138
94 0.7386327 0.4478359
95 0.7382090 0.4470468
96 0.7376659 0.4457329
97 0.7376438 0.4455829
98 0.7378829 0.4461227
99 0.7378178 0.4459116
100 0.7373288 0.4448061
Accuracy was used to select the optimal model using the largest value.
The final value used for the model was k = 1.
# Release the 4 parallel workers created with makePSOCKcluster(4) above.
stopCluster(cl)
LS0tDQp0aXRsZTogIkNhbGlicmF0aW9uIGQndW4gYWxnb3JpdGhtZSBhdmVjIGNhcmV0Ig0KYXV0aG9yOiAiSHVzc29uIGV0IGFsLiINCmRhdGU6ICIwOS8wOS8yMDE4Ig0Kb3V0cHV0Og0KICBodG1sX25vdGVib29rOg0KICAgIHRvYzogeWVzDQogICAgdG9jX2RlcHRoOiAzDQogICAgdG9jX2Zsb2F0OiB5ZXMNCiAgaHRtbF9kb2N1bWVudDoNCiAgICB0b2M6IHllcw0KICAgIHRvY19kZXB0aDogJzMnDQogICAgdG9jX2Zsb2F0OiB5ZXMNCi0tLQ0KDQojIDEuIEltcG9ydGVyIGxlcyBkb25uw6llcw0KDQpgYGB7cixtZXNzYWdlPUZBTFNFLHdhcm5pbmc9RkFMU0V9DQpsaWJyYXJ5KGtlcm5sYWIpDQpkYXRhKHNwYW0pDQpzdW1tYXJ5KHNwYW1bLDU2OjU4XSkNCmBgYA0KDQojIDIuIEzigJlhbGdvcml0aG1lIGRlcyBwbHVzIHByb2NoZXMgdm9pc2lucw0KDQpgYGB7cn0NCnNldC5zZWVkKDEyMzQpDQpzcGFtMSA8LSBzcGFtW3NhbXBsZShucm93KHNwYW0pKSxdDQphcHAgPC0gc3BhbTFbMTozMDAwLF0NCnZhbGlkIDwtIHNwYW0xWy0oMTozMDAwKSxdDQpgYGANCg0KYGBge3IsbWVzc2FnZT1GQUxTRSx3YXJuaW5nPUZBTFNFfQ0KbGlicmFyeShjbGFzcykNCnJlZzNwcHYgPC0ga25uKGFwcFssLTU4XSx2YWxpZFssLTU4XSxjbD1hcHAkdHlwZSxrPTMpDQptZWFuKHJlZzNwcHYhPXZhbGlkJHR5cGUpDQpgYGANCg0KIyAzLiBDYWxpYnJhdGlvbiBkZXMgcGFyYW3DqHRyZXMNCg0KYGBge3IsbWVzc2FnZT1GQUxTRSx3YXJuaW5nPUZBTFNFfQ0KZ3JpbGxlLksgPC0gZGF0YS5mcmFtZShrPXNlcSgxLDEwMCxieT0xKSkNCmxpYnJhcnkoY2FyZXQpDQpjdHJsMSA8LSB0cmFpbkNvbnRyb2wobWV0aG9kPSJMR09DViIsbnVtYmVyPTEsaW5kZXg9bGlzdCgxOjMwMDApKQ0Kc2VsLmsxIDwtIHRyYWluKHR5cGV+LixkYXRhPXNwYW0xLG1ldGhvZD0ia25uIix0ckNvbnRyb2w9Y3RybDEsdHVuZUdyaWQ9Z3JpbGxlLkspDQpzZWwuazENCmBgYA0KDQpgYGB7cn0NCnNlbC5rMSRiZXN0VHVuZQ0KYGBgDQoNCmBgYHtyfQ0KcGxvdChzZWwuazEpDQpgYGANCg0KIyA0LiBDb21wbMOpbWVudHMNCg0KYGBge3J9DQpjdHJsMiA8LSB0cmFpbkNvbnRyb2wobWV0aG9kPSJjdiIsbnVtYmVyPTEwKQ0Kc2V0LnNlZWQoMTIzKQ0Kc2VsLmsyIDwtIHRyYWluKHR5cGV+LixkYXRhPXNwYW0xLG1ldGhvZD0ia25uIix0ckNvbnRyb2w9Y3RybDIsdHVuZUdyaWQ9Z3JpbGxlLkspDQpzZWwuazINCmBgYA0KDQpgYGB7cn0NCmN0cmwzIDwtIHRyYWluQ29udHJvbChtZXRob2Q9InJlcGVhdGVkY3YiLG51bWJlcj0xMCxyZXBlYXRzPTIpDQp0cmFpbih0eXBlfi4sZGF0YT1zcGFtMSxtZXRob2Q9ImtubiIsdHJDb250cm9sPWN0cmwzLHR1bmVHcmlkPWdyaWxsZS5LKQ0KYGBgDQoNCmBgYHtyLG1lc3NhZ2U9RkFMU0Usd2FybmluZz1GQUxTRX0NCnNldC5zZWVkKDEyMykNCnN5c3RlbS50aW1lKHNlbC5rMyA8LSB0cmFpbih0eXBlfi4sZGF0YT1zcGFtMSxtZXRob2Q9ImtubiIsdHJD
b250cm9sPWN0cmwyLHR1bmVHcmlkPWdyaWxsZS5LKSkNCg0KbGlicmFyeShkb1BhcmFsbGVsKQ0KY2wgPC0gbWFrZVBTT0NLY2x1c3Rlcig0KQ0KcmVnaXN0ZXJEb1BhcmFsbGVsKGNsKSAgICAgIyMgbGVzIGNsdXN0ZXJzIHNlcm9udCBmZXJtw6lzIGVuIGZpbiBkZSBwcm9ncmFtbWUNCnNldC5zZWVkKDEyMykNCnN5c3RlbS50aW1lKHNlbC5rNCA8LSB0cmFpbih0eXBlfi4sZGF0YT1zcGFtMSxtZXRob2Q9ImtubiIsdHJDb250cm9sPWN0cmwyLHR1bmVHcmlkPWdyaWxsZS5LKSkNCmBgYA0KDQpgYGB7cn0NCmN0cmwzIDwtIHRyYWluQ29udHJvbChtZXRob2Q9IkxHT0NWIixudW1iZXI9MSxpbmRleD1saXN0KDE6MzAwMCksY2xhc3NQcm9icz1UUlVFLHN1bW1hcnk9dHdvQ2xhc3NTdW1tYXJ5KQ0Kc2VsLms1IDwtIHRyYWluKHR5cGV+LixkYXRhPXNwYW0xLG1ldGhvZD0ia25uIix0ckNvbnRyb2w9Y3RybDMsbWV0cmljPSJST0MiLHR1bmVHcmlkPWdyaWxsZS5LKQ0Kc2VsLms1DQpgYGANCg0KYGBge3J9DQpnZXRUcmFpblBlcmYoc2VsLms1KQ0KYGBgDQoNCiMgUG91ciBhbGxlciBwbHVzIGxvaW4NCg0KYGBge3J9DQpjdHJsMyA8LSB0cmFpbkNvbnRyb2wobWV0aG9kPSJyZXBlYXRlZGN2IixudW1iZXI9MTAscmVwZWF0cz0yMCkNCnRyYWluKHR5cGV+LixkYXRhPXNwYW0xLG1ldGhvZD0ia25uIix0ckNvbnRyb2w9Y3RybDMsdHVuZUdyaWQ9Z3JpbGxlLkspDQpgYGANCmBgYHtyfQ0Kc3RvcENsdXN0ZXIoY2wpDQpgYGANCg0KDQo=