-
Notifications
You must be signed in to change notification settings - Fork 0
/
decision_tree
1197 lines (1197 loc) · 76.4 KB
/
decision_tree
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
digraph Tree {
node [shape=box, style="filled, rounded", color="black", fontname="helvetica"] ;
edge [fontname="helvetica"] ;
0 [label=<V14 ≤ -0.103<br/>entropy = 1.0<br/>samples = 37372<br/>value = [18686, 18686]<br/>class = No Fraud>, fillcolor="#ffffff"] ;
1 [label=<V10 ≤ -0.002<br/>entropy = 0.13<br/>samples = 16603<br/>value = [300, 16303]<br/>class = Fraud>, fillcolor="#3d9fe5"] ;
0 -> 1 [labeldistance=2.5, labelangle=45, headlabel="True"] ;
2 [label=<V14 ≤ -0.321<br/>entropy = 0.014<br/>samples = 15465<br/>value = [20, 15445]<br/>class = Fraud>, fillcolor="#399de5"] ;
1 -> 2 ;
3 [label=<V7 ≤ 1.644<br/>entropy = 0.004<br/>samples = 15016<br/>value = [4, 15012]<br/>class = Fraud>, fillcolor="#399de5"] ;
2 -> 3 ;
4 [label=<V4 ≤ -0.732<br/>entropy = 0.002<br/>samples = 14993<br/>value = [2, 14991]<br/>class = Fraud>, fillcolor="#399de5"] ;
3 -> 4 ;
5 [label=<V19 ≤ -0.518<br/>entropy = 0.235<br/>samples = 26<br/>value = [1, 25]<br/>class = Fraud>, fillcolor="#41a1e6"] ;
4 -> 5 ;
6 [label=<entropy = 0.0<br/>samples = 1<br/>value = [1, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
5 -> 6 ;
7 [label=<entropy = 0.0<br/>samples = 25<br/>value = [0, 25]<br/>class = Fraud>, fillcolor="#399de5"] ;
5 -> 7 ;
8 [label=<V15 ≤ 1.159<br/>entropy = 0.001<br/>samples = 14967<br/>value = [1, 14966]<br/>class = Fraud>, fillcolor="#399de5"] ;
4 -> 8 ;
9 [label=<entropy = 0.0<br/>samples = 13503<br/>value = [0, 13503]<br/>class = Fraud>, fillcolor="#399de5"] ;
8 -> 9 ;
10 [label=<V15 ≤ 1.159<br/>entropy = 0.008<br/>samples = 1464<br/>value = [1, 1463]<br/>class = Fraud>, fillcolor="#399de5"] ;
8 -> 10 ;
11 [label=<entropy = 0.0<br/>samples = 1<br/>value = [1, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
10 -> 11 ;
12 [label=<entropy = 0.0<br/>samples = 1463<br/>value = [0, 1463]<br/>class = Fraud>, fillcolor="#399de5"] ;
10 -> 12 ;
13 [label=<V17 ≤ 1.379<br/>entropy = 0.426<br/>samples = 23<br/>value = [2, 21]<br/>class = Fraud>, fillcolor="#4ca6e7"] ;
3 -> 13 ;
14 [label=<entropy = 0.0<br/>samples = 21<br/>value = [0, 21]<br/>class = Fraud>, fillcolor="#399de5"] ;
13 -> 14 ;
15 [label=<entropy = 0.0<br/>samples = 2<br/>value = [2, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
13 -> 15 ;
16 [label=<V16 ≤ 0.586<br/>entropy = 0.222<br/>samples = 449<br/>value = [16, 433]<br/>class = Fraud>, fillcolor="#40a1e6"] ;
2 -> 16 ;
17 [label=<V3 ≤ 0.602<br/>entropy = 0.043<br/>samples = 430<br/>value = [2, 428]<br/>class = Fraud>, fillcolor="#3a9de5"] ;
16 -> 17 ;
18 [label=<V26 ≤ 1.814<br/>entropy = 0.024<br/>samples = 429<br/>value = [1, 428]<br/>class = Fraud>, fillcolor="#399de5"] ;
17 -> 18 ;
19 [label=<entropy = 0.0<br/>samples = 421<br/>value = [0, 421]<br/>class = Fraud>, fillcolor="#399de5"] ;
18 -> 19 ;
20 [label=<V19 ≤ -0.3<br/>entropy = 0.544<br/>samples = 8<br/>value = [1, 7]<br/>class = Fraud>, fillcolor="#55abe9"] ;
18 -> 20 ;
21 [label=<entropy = 0.0<br/>samples = 1<br/>value = [1, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
20 -> 21 ;
22 [label=<entropy = 0.0<br/>samples = 7<br/>value = [0, 7]<br/>class = Fraud>, fillcolor="#399de5"] ;
20 -> 22 ;
23 [label=<entropy = 0.0<br/>samples = 1<br/>value = [1, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
17 -> 23 ;
24 [label=<V18 ≤ 0.394<br/>entropy = 0.831<br/>samples = 19<br/>value = [14, 5]<br/>class = No Fraud>, fillcolor="#eeae80"] ;
16 -> 24 ;
25 [label=<entropy = 0.0<br/>samples = 5<br/>value = [0, 5]<br/>class = Fraud>, fillcolor="#399de5"] ;
24 -> 25 ;
26 [label=<entropy = 0.0<br/>samples = 14<br/>value = [14, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
24 -> 26 ;
27 [label=<V4 ≤ 0.053<br/>entropy = 0.805<br/>samples = 1138<br/>value = [280, 858]<br/>class = Fraud>, fillcolor="#7abded"] ;
1 -> 27 ;
28 [label=<V7 ≤ 0.532<br/>entropy = 0.97<br/>samples = 412<br/>value = [248, 164]<br/>class = No Fraud>, fillcolor="#f6d4bc"] ;
27 -> 28 ;
29 [label=<V17 ≤ 0.536<br/>entropy = 0.708<br/>samples = 176<br/>value = [34, 142]<br/>class = Fraud>, fillcolor="#68b4eb"] ;
28 -> 29 ;
30 [label=<V10 ≤ 5.063<br/>entropy = 0.076<br/>samples = 108<br/>value = [1, 107]<br/>class = Fraud>, fillcolor="#3b9ee5"] ;
29 -> 30 ;
31 [label=<entropy = 0.0<br/>samples = 107<br/>value = [0, 107]<br/>class = Fraud>, fillcolor="#399de5"] ;
30 -> 31 ;
32 [label=<entropy = 0.0<br/>samples = 1<br/>value = [1, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
30 -> 32 ;
33 [label=<V8 ≤ -1.019<br/>entropy = 0.999<br/>samples = 68<br/>value = [33, 35]<br/>class = Fraud>, fillcolor="#f4f9fe"] ;
29 -> 33 ;
34 [label=<V12 ≤ 0.864<br/>entropy = 0.211<br/>samples = 30<br/>value = [1, 29]<br/>class = Fraud>, fillcolor="#40a0e6"] ;
33 -> 34 ;
35 [label=<entropy = 0.0<br/>samples = 29<br/>value = [0, 29]<br/>class = Fraud>, fillcolor="#399de5"] ;
34 -> 35 ;
36 [label=<entropy = 0.0<br/>samples = 1<br/>value = [1, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
34 -> 36 ;
37 [label=<V22 ≤ -0.347<br/>entropy = 0.629<br/>samples = 38<br/>value = [32, 6]<br/>class = No Fraud>, fillcolor="#ea995e"] ;
33 -> 37 ;
38 [label=<V15 ≤ 0.034<br/>entropy = 0.996<br/>samples = 13<br/>value = [7, 6]<br/>class = No Fraud>, fillcolor="#fbede3"] ;
37 -> 38 ;
39 [label=<V16 ≤ -0.183<br/>entropy = 0.592<br/>samples = 7<br/>value = [1, 6]<br/>class = Fraud>, fillcolor="#5aade9"] ;
38 -> 39 ;
40 [label=<entropy = 0.0<br/>samples = 1<br/>value = [1, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
39 -> 40 ;
41 [label=<entropy = 0.0<br/>samples = 6<br/>value = [0, 6]<br/>class = Fraud>, fillcolor="#399de5"] ;
39 -> 41 ;
42 [label=<entropy = 0.0<br/>samples = 6<br/>value = [6, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
38 -> 42 ;
43 [label=<entropy = 0.0<br/>samples = 25<br/>value = [25, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
37 -> 43 ;
44 [label=<V5 ≤ 0.207<br/>entropy = 0.447<br/>samples = 236<br/>value = [214, 22]<br/>class = No Fraud>, fillcolor="#e88e4d"] ;
28 -> 44 ;
45 [label=<V21 ≤ -0.059<br/>entropy = 0.99<br/>samples = 25<br/>value = [11, 14]<br/>class = Fraud>, fillcolor="#d5eaf9"] ;
44 -> 45 ;
46 [label=<V19 ≤ -0.141<br/>entropy = 0.414<br/>samples = 12<br/>value = [11, 1]<br/>class = No Fraud>, fillcolor="#e78c4b"] ;
45 -> 46 ;
47 [label=<entropy = 0.0<br/>samples = 11<br/>value = [11, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
46 -> 47 ;
48 [label=<entropy = 0.0<br/>samples = 1<br/>value = [0, 1]<br/>class = Fraud>, fillcolor="#399de5"] ;
46 -> 48 ;
49 [label=<entropy = 0.0<br/>samples = 13<br/>value = [0, 13]<br/>class = Fraud>, fillcolor="#399de5"] ;
45 -> 49 ;
50 [label=<V10 ≤ 0.067<br/>entropy = 0.233<br/>samples = 211<br/>value = [203, 8]<br/>class = No Fraud>, fillcolor="#e68641"] ;
44 -> 50 ;
51 [label=<V9 ≤ 0.548<br/>entropy = 0.874<br/>samples = 17<br/>value = [12, 5]<br/>class = No Fraud>, fillcolor="#f0b58b"] ;
50 -> 51 ;
52 [label=<entropy = 0.0<br/>samples = 12<br/>value = [12, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
51 -> 52 ;
53 [label=<entropy = 0.0<br/>samples = 5<br/>value = [0, 5]<br/>class = Fraud>, fillcolor="#399de5"] ;
51 -> 53 ;
54 [label=<V11 ≤ -0.482<br/>entropy = 0.115<br/>samples = 194<br/>value = [191, 3]<br/>class = No Fraud>, fillcolor="#e5833c"] ;
50 -> 54 ;
55 [label=<V16 ≤ 0.602<br/>entropy = 0.327<br/>samples = 50<br/>value = [47, 3]<br/>class = No Fraud>, fillcolor="#e78946"] ;
54 -> 55 ;
56 [label=<V4 ≤ -0.616<br/>entropy = 0.845<br/>samples = 11<br/>value = [8, 3]<br/>class = No Fraud>, fillcolor="#efb083"] ;
55 -> 56 ;
57 [label=<entropy = 0.0<br/>samples = 8<br/>value = [8, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
56 -> 57 ;
58 [label=<entropy = 0.0<br/>samples = 3<br/>value = [0, 3]<br/>class = Fraud>, fillcolor="#399de5"] ;
56 -> 58 ;
59 [label=<entropy = 0.0<br/>samples = 39<br/>value = [39, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
55 -> 59 ;
60 [label=<entropy = 0.0<br/>samples = 144<br/>value = [144, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
54 -> 60 ;
61 [label=<V13 ≤ 1.146<br/>entropy = 0.261<br/>samples = 726<br/>value = [32, 694]<br/>class = Fraud>, fillcolor="#42a2e6"] ;
27 -> 61 ;
62 [label=<V11 ≤ -1.276<br/>entropy = 0.218<br/>samples = 718<br/>value = [25, 693]<br/>class = Fraud>, fillcolor="#40a1e6"] ;
61 -> 62 ;
63 [label=<entropy = 0.0<br/>samples = 5<br/>value = [5, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
62 -> 63 ;
64 [label=<V16 ≤ 0.357<br/>entropy = 0.185<br/>samples = 713<br/>value = [20, 693]<br/>class = Fraud>, fillcolor="#3fa0e6"] ;
62 -> 64 ;
65 [label=<entropy = 0.0<br/>samples = 324<br/>value = [0, 324]<br/>class = Fraud>, fillcolor="#399de5"] ;
64 -> 65 ;
66 [label=<V15 ≤ 0.254<br/>entropy = 0.292<br/>samples = 389<br/>value = [20, 369]<br/>class = Fraud>, fillcolor="#44a2e6"] ;
64 -> 66 ;
67 [label=<V26 ≤ -0.613<br/>entropy = 0.164<br/>samples = 374<br/>value = [9, 365]<br/>class = Fraud>, fillcolor="#3e9fe6"] ;
66 -> 67 ;
68 [label=<V3 ≤ -0.059<br/>entropy = 0.946<br/>samples = 11<br/>value = [4, 7]<br/>class = Fraud>, fillcolor="#aad5f4"] ;
67 -> 68 ;
69 [label=<entropy = 0.0<br/>samples = 7<br/>value = [0, 7]<br/>class = Fraud>, fillcolor="#399de5"] ;
68 -> 69 ;
70 [label=<entropy = 0.0<br/>samples = 4<br/>value = [4, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
68 -> 70 ;
71 [label=<V1 ≤ 1.665<br/>entropy = 0.105<br/>samples = 363<br/>value = [5, 358]<br/>class = Fraud>, fillcolor="#3c9ee5"] ;
67 -> 71 ;
72 [label=<V26 ≤ 1.599<br/>entropy = 0.07<br/>samples = 359<br/>value = [3, 356]<br/>class = Fraud>, fillcolor="#3b9ee5"] ;
71 -> 72 ;
73 [label=<V23 ≤ 0.08<br/>entropy = 0.05<br/>samples = 358<br/>value = [2, 356]<br/>class = Fraud>, fillcolor="#3a9ee5"] ;
72 -> 73 ;
74 [label=<entropy = 0.0<br/>samples = 290<br/>value = [0, 290]<br/>class = Fraud>, fillcolor="#399de5"] ;
73 -> 74 ;
75 [label=<V2 ≤ -0.057<br/>entropy = 0.191<br/>samples = 68<br/>value = [2, 66]<br/>class = Fraud>, fillcolor="#3fa0e6"] ;
73 -> 75 ;
76 [label=<V20 ≤ -0.398<br/>entropy = 0.971<br/>samples = 5<br/>value = [2, 3]<br/>class = Fraud>, fillcolor="#bddef6"] ;
75 -> 76 ;
77 [label=<entropy = 0.0<br/>samples = 3<br/>value = [0, 3]<br/>class = Fraud>, fillcolor="#399de5"] ;
76 -> 77 ;
78 [label=<entropy = 0.0<br/>samples = 2<br/>value = [2, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
76 -> 78 ;
79 [label=<entropy = 0.0<br/>samples = 63<br/>value = [0, 63]<br/>class = Fraud>, fillcolor="#399de5"] ;
75 -> 79 ;
80 [label=<entropy = 0.0<br/>samples = 1<br/>value = [1, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
72 -> 80 ;
81 [label=<V12 ≤ 0.487<br/>entropy = 1.0<br/>samples = 4<br/>value = [2, 2]<br/>class = No Fraud>, fillcolor="#ffffff"] ;
71 -> 81 ;
82 [label=<entropy = 0.0<br/>samples = 2<br/>value = [2, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
81 -> 82 ;
83 [label=<entropy = 0.0<br/>samples = 2<br/>value = [0, 2]<br/>class = Fraud>, fillcolor="#399de5"] ;
81 -> 83 ;
84 [label=<V10 ≤ 0.122<br/>entropy = 0.837<br/>samples = 15<br/>value = [11, 4]<br/>class = No Fraud>, fillcolor="#eeaf81"] ;
66 -> 84 ;
85 [label=<V16 ≤ 1.151<br/>entropy = 0.722<br/>samples = 5<br/>value = [1, 4]<br/>class = Fraud>, fillcolor="#6ab6ec"] ;
84 -> 85 ;
86 [label=<entropy = 0.0<br/>samples = 4<br/>value = [0, 4]<br/>class = Fraud>, fillcolor="#399de5"] ;
85 -> 86 ;
87 [label=<entropy = 0.0<br/>samples = 1<br/>value = [1, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
85 -> 87 ;
88 [label=<entropy = 0.0<br/>samples = 10<br/>value = [10, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
84 -> 88 ;
89 [label=<V18 ≤ -0.236<br/>entropy = 0.544<br/>samples = 8<br/>value = [7, 1]<br/>class = No Fraud>, fillcolor="#e99355"] ;
61 -> 89 ;
90 [label=<entropy = 0.0<br/>samples = 1<br/>value = [0, 1]<br/>class = Fraud>, fillcolor="#399de5"] ;
89 -> 90 ;
91 [label=<entropy = 0.0<br/>samples = 7<br/>value = [7, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
89 -> 91 ;
92 [label=<V4 ≤ -0.068<br/>entropy = 0.514<br/>samples = 20769<br/>value = [18386, 2383]<br/>class = No Fraud>, fillcolor="#e89153"] ;
0 -> 92 [labeldistance=2.5, labelangle=-45, headlabel="False"] ;
93 [label=<V14 ≤ 0.453<br/>entropy = 0.302<br/>samples = 17438<br/>value = [16501, 937]<br/>class = No Fraud>, fillcolor="#e68844"] ;
92 -> 93 ;
94 [label=<V4 ≤ -0.744<br/>entropy = 0.662<br/>samples = 3787<br/>value = [3136, 651]<br/>class = No Fraud>, fillcolor="#ea9b62"] ;
93 -> 94 ;
95 [label=<V4 ≤ -1.035<br/>entropy = 0.192<br/>samples = 1858<br/>value = [1803, 55]<br/>class = No Fraud>, fillcolor="#e6853f"] ;
94 -> 95 ;
96 [label=<V15 ≤ 1.033<br/>entropy = 0.02<br/>samples = 1057<br/>value = [1055, 2]<br/>class = No Fraud>, fillcolor="#e58139"] ;
95 -> 96 ;
97 [label=<entropy = 0.0<br/>samples = 988<br/>value = [988, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
96 -> 97 ;
98 [label=<V14 ≤ 0.436<br/>entropy = 0.189<br/>samples = 69<br/>value = [67, 2]<br/>class = No Fraud>, fillcolor="#e6853f"] ;
96 -> 98 ;
99 [label=<entropy = 0.0<br/>samples = 63<br/>value = [63, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
98 -> 99 ;
100 [label=<V9 ≤ 0.996<br/>entropy = 0.918<br/>samples = 6<br/>value = [4, 2]<br/>class = No Fraud>, fillcolor="#f2c09c"] ;
98 -> 100 ;
101 [label=<entropy = 0.0<br/>samples = 4<br/>value = [4, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
100 -> 101 ;
102 [label=<entropy = 0.0<br/>samples = 2<br/>value = [0, 2]<br/>class = Fraud>, fillcolor="#399de5"] ;
100 -> 102 ;
103 [label=<V2 ≤ -0.25<br/>entropy = 0.351<br/>samples = 801<br/>value = [748, 53]<br/>class = No Fraud>, fillcolor="#e78a47"] ;
95 -> 103 ;
104 [label=<V2 ≤ -0.745<br/>entropy = 0.536<br/>samples = 433<br/>value = [380, 53]<br/>class = No Fraud>, fillcolor="#e99355"] ;
103 -> 104 ;
105 [label=<entropy = 0.0<br/>samples = 164<br/>value = [164, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
104 -> 105 ;
106 [label=<V8 ≤ -0.123<br/>entropy = 0.716<br/>samples = 269<br/>value = [216, 53]<br/>class = No Fraud>, fillcolor="#eba06a"] ;
104 -> 106 ;
107 [label=<V26 ≤ 0.446<br/>entropy = 0.872<br/>samples = 181<br/>value = [128, 53]<br/>class = No Fraud>, fillcolor="#f0b58b"] ;
106 -> 107 ;
108 [label=<V18 ≤ -0.098<br/>entropy = 0.955<br/>samples = 141<br/>value = [88, 53]<br/>class = No Fraud>, fillcolor="#f5cdb0"] ;
107 -> 108 ;
109 [label=<V28 ≤ -0.81<br/>entropy = 0.323<br/>samples = 34<br/>value = [32, 2]<br/>class = No Fraud>, fillcolor="#e78945"] ;
108 -> 109 ;
110 [label=<entropy = 0.0<br/>samples = 2<br/>value = [0, 2]<br/>class = Fraud>, fillcolor="#399de5"] ;
109 -> 110 ;
111 [label=<entropy = 0.0<br/>samples = 32<br/>value = [32, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
109 -> 111 ;
112 [label=<V28 ≤ -0.144<br/>entropy = 0.998<br/>samples = 107<br/>value = [56, 51]<br/>class = No Fraud>, fillcolor="#fdf4ed"] ;
108 -> 112 ;
113 [label=<V21 ≤ 0.017<br/>entropy = 0.258<br/>samples = 23<br/>value = [22, 1]<br/>class = No Fraud>, fillcolor="#e68742"] ;
112 -> 113 ;
114 [label=<entropy = 0.0<br/>samples = 22<br/>value = [22, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
113 -> 114 ;
115 [label=<entropy = 0.0<br/>samples = 1<br/>value = [0, 1]<br/>class = Fraud>, fillcolor="#399de5"] ;
113 -> 115 ;
116 [label=<V1 ≤ 1.828<br/>entropy = 0.974<br/>samples = 84<br/>value = [34, 50]<br/>class = Fraud>, fillcolor="#c0e0f7"] ;
112 -> 116 ;
117 [label=<V28 ≤ 0.307<br/>entropy = 0.909<br/>samples = 74<br/>value = [24, 50]<br/>class = Fraud>, fillcolor="#98ccf1"] ;
116 -> 117 ;
118 [label=<V5 ≤ -0.046<br/>entropy = 0.758<br/>samples = 64<br/>value = [14, 50]<br/>class = Fraud>, fillcolor="#70b8ec"] ;
117 -> 118 ;
119 [label=<entropy = 0.0<br/>samples = 5<br/>value = [5, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
118 -> 119 ;
120 [label=<V13 ≤ -0.655<br/>entropy = 0.616<br/>samples = 59<br/>value = [9, 50]<br/>class = Fraud>, fillcolor="#5dafea"] ;
118 -> 120 ;
121 [label=<entropy = 0.0<br/>samples = 3<br/>value = [3, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
120 -> 121 ;
122 [label=<V8 ≤ -0.13<br/>entropy = 0.491<br/>samples = 56<br/>value = [6, 50]<br/>class = Fraud>, fillcolor="#51a9e8"] ;
120 -> 122 ;
123 [label=<V11 ≤ -1.128<br/>entropy = 0.381<br/>samples = 54<br/>value = [4, 50]<br/>class = Fraud>, fillcolor="#49a5e7"] ;
122 -> 123 ;
124 [label=<V23 ≤ 0.3<br/>entropy = 0.918<br/>samples = 3<br/>value = [2, 1]<br/>class = No Fraud>, fillcolor="#f2c09c"] ;
123 -> 124 ;
125 [label=<entropy = 0.0<br/>samples = 2<br/>value = [2, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
124 -> 125 ;
126 [label=<entropy = 0.0<br/>samples = 1<br/>value = [0, 1]<br/>class = Fraud>, fillcolor="#399de5"] ;
124 -> 126 ;
127 [label=<V2 ≤ -0.708<br/>entropy = 0.239<br/>samples = 51<br/>value = [2, 49]<br/>class = Fraud>, fillcolor="#41a1e6"] ;
123 -> 127 ;
128 [label=<V9 ≤ 0.329<br/>entropy = 0.764<br/>samples = 9<br/>value = [2, 7]<br/>class = Fraud>, fillcolor="#72b9ec"] ;
127 -> 128 ;
129 [label=<entropy = 0.0<br/>samples = 2<br/>value = [2, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
128 -> 129 ;
130 [label=<entropy = 0.0<br/>samples = 7<br/>value = [0, 7]<br/>class = Fraud>, fillcolor="#399de5"] ;
128 -> 130 ;
131 [label=<entropy = 0.0<br/>samples = 42<br/>value = [0, 42]<br/>class = Fraud>, fillcolor="#399de5"] ;
127 -> 131 ;
132 [label=<entropy = 0.0<br/>samples = 2<br/>value = [2, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
122 -> 132 ;
133 [label=<entropy = 0.0<br/>samples = 10<br/>value = [10, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
117 -> 133 ;
134 [label=<entropy = 0.0<br/>samples = 10<br/>value = [10, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
116 -> 134 ;
135 [label=<entropy = 0.0<br/>samples = 40<br/>value = [40, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
107 -> 135 ;
136 [label=<entropy = 0.0<br/>samples = 88<br/>value = [88, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
106 -> 136 ;
137 [label=<entropy = 0.0<br/>samples = 368<br/>value = [368, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
103 -> 137 ;
138 [label=<V1 ≤ 1.58<br/>entropy = 0.892<br/>samples = 1929<br/>value = [1333, 596]<br/>class = No Fraud>, fillcolor="#f1b992"] ;
94 -> 138 ;
139 [label=<V3 ≤ 1.264<br/>entropy = 0.962<br/>samples = 1490<br/>value = [915, 575]<br/>class = No Fraud>, fillcolor="#f5d0b5"] ;
138 -> 139 ;
140 [label=<V5 ≤ 0.756<br/>entropy = 0.997<br/>samples = 1217<br/>value = [647, 570]<br/>class = No Fraud>, fillcolor="#fcf0e7"] ;
139 -> 140 ;
141 [label=<V13 ≤ 0.176<br/>entropy = 0.998<br/>samples = 1058<br/>value = [504, 554]<br/>class = Fraud>, fillcolor="#edf6fd"] ;
140 -> 141 ;
142 [label=<V9 ≤ 1.23<br/>entropy = 0.943<br/>samples = 699<br/>value = [252, 447]<br/>class = Fraud>, fillcolor="#a9d4f4"] ;
141 -> 142 ;
143 [label=<V4 ≤ -0.578<br/>entropy = 0.89<br/>samples = 641<br/>value = [197, 444]<br/>class = Fraud>, fillcolor="#91c8f1"] ;
142 -> 143 ;
144 [label=<V14 ≤ 0.262<br/>entropy = 0.837<br/>samples = 75<br/>value = [55, 20]<br/>class = No Fraud>, fillcolor="#eeaf81"] ;
143 -> 144 ;
145 [label=<V5 ≤ 0.296<br/>entropy = 0.99<br/>samples = 34<br/>value = [15, 19]<br/>class = Fraud>, fillcolor="#d5eafa"] ;
144 -> 145 ;
146 [label=<V15 ≤ -0.806<br/>entropy = 0.454<br/>samples = 21<br/>value = [2, 19]<br/>class = Fraud>, fillcolor="#4ea7e8"] ;
145 -> 146 ;
147 [label=<entropy = 0.0<br/>samples = 2<br/>value = [2, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
146 -> 147 ;
148 [label=<entropy = 0.0<br/>samples = 19<br/>value = [0, 19]<br/>class = Fraud>, fillcolor="#399de5"] ;
146 -> 148 ;
149 [label=<entropy = 0.0<br/>samples = 13<br/>value = [13, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
145 -> 149 ;
150 [label=<V15 ≤ 1.46<br/>entropy = 0.165<br/>samples = 41<br/>value = [40, 1]<br/>class = No Fraud>, fillcolor="#e6843e"] ;
144 -> 150 ;
151 [label=<entropy = 0.0<br/>samples = 39<br/>value = [39, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
150 -> 151 ;
152 [label=<V8 ≤ -0.162<br/>entropy = 1.0<br/>samples = 2<br/>value = [1, 1]<br/>class = No Fraud>, fillcolor="#ffffff"] ;
150 -> 152 ;
153 [label=<entropy = 0.0<br/>samples = 1<br/>value = [1, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
152 -> 153 ;
154 [label=<entropy = 0.0<br/>samples = 1<br/>value = [0, 1]<br/>class = Fraud>, fillcolor="#399de5"] ;
152 -> 154 ;
155 [label=<V1 ≤ 0.935<br/>entropy = 0.813<br/>samples = 566<br/>value = [142, 424]<br/>class = Fraud>, fillcolor="#7bbeee"] ;
143 -> 155 ;
156 [label=<V8 ≤ -0.116<br/>entropy = 0.609<br/>samples = 401<br/>value = [60, 341]<br/>class = Fraud>, fillcolor="#5caeea"] ;
155 -> 156 ;
157 [label=<V20 ≤ -0.134<br/>entropy = 0.443<br/>samples = 348<br/>value = [32, 316]<br/>class = Fraud>, fillcolor="#4da7e8"] ;
156 -> 157 ;
158 [label=<V6 ≤ -0.33<br/>entropy = 0.249<br/>samples = 265<br/>value = [11, 254]<br/>class = Fraud>, fillcolor="#42a1e6"] ;
157 -> 158 ;
159 [label=<V3 ≤ 0.388<br/>entropy = 0.811<br/>samples = 4<br/>value = [3, 1]<br/>class = No Fraud>, fillcolor="#eeab7b"] ;
158 -> 159 ;
160 [label=<entropy = 0.0<br/>samples = 3<br/>value = [3, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
159 -> 160 ;
161 [label=<entropy = 0.0<br/>samples = 1<br/>value = [0, 1]<br/>class = Fraud>, fillcolor="#399de5"] ;
159 -> 161 ;
162 [label=<V15 ≤ -1.07<br/>entropy = 0.198<br/>samples = 261<br/>value = [8, 253]<br/>class = Fraud>, fillcolor="#3fa0e6"] ;
158 -> 162 ;
163 [label=<entropy = 0.0<br/>samples = 2<br/>value = [2, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
162 -> 163 ;
164 [label=<V28 ≤ -0.506<br/>entropy = 0.159<br/>samples = 259<br/>value = [6, 253]<br/>class = Fraud>, fillcolor="#3e9fe6"] ;
162 -> 164 ;
165 [label=<V22 ≤ 0.203<br/>entropy = 0.845<br/>samples = 11<br/>value = [3, 8]<br/>class = Fraud>, fillcolor="#83c2ef"] ;
164 -> 165 ;
166 [label=<entropy = 0.0<br/>samples = 3<br/>value = [3, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
165 -> 166 ;
167 [label=<entropy = 0.0<br/>samples = 8<br/>value = [0, 8]<br/>class = Fraud>, fillcolor="#399de5"] ;
165 -> 167 ;
168 [label=<V17 ≤ 0.936<br/>entropy = 0.094<br/>samples = 248<br/>value = [3, 245]<br/>class = Fraud>, fillcolor="#3b9ee5"] ;
164 -> 168 ;
169 [label=<entropy = 0.0<br/>samples = 202<br/>value = [0, 202]<br/>class = Fraud>, fillcolor="#399de5"] ;
168 -> 169 ;
170 [label=<V16 ≤ 0.374<br/>entropy = 0.348<br/>samples = 46<br/>value = [3, 43]<br/>class = Fraud>, fillcolor="#47a4e7"] ;
168 -> 170 ;
171 [label=<entropy = 0.0<br/>samples = 2<br/>value = [2, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
170 -> 171 ;
172 [label=<V9 ≤ 0.196<br/>entropy = 0.156<br/>samples = 44<br/>value = [1, 43]<br/>class = Fraud>, fillcolor="#3e9fe6"] ;
170 -> 172 ;
173 [label=<entropy = 0.0<br/>samples = 1<br/>value = [1, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
172 -> 173 ;
174 [label=<entropy = 0.0<br/>samples = 43<br/>value = [0, 43]<br/>class = Fraud>, fillcolor="#399de5"] ;
172 -> 174 ;
175 [label=<V2 ≤ -0.099<br/>entropy = 0.816<br/>samples = 83<br/>value = [21, 62]<br/>class = Fraud>, fillcolor="#7cbeee"] ;
157 -> 175 ;
176 [label=<V11 ≤ -1.164<br/>entropy = 0.581<br/>samples = 72<br/>value = [10, 62]<br/>class = Fraud>, fillcolor="#59ade9"] ;
175 -> 176 ;
177 [label=<entropy = 0.0<br/>samples = 6<br/>value = [6, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
176 -> 177 ;
178 [label=<V22 ≤ 0.792<br/>entropy = 0.33<br/>samples = 66<br/>value = [4, 62]<br/>class = Fraud>, fillcolor="#46a3e7"] ;
176 -> 178 ;
179 [label=<V23 ≤ -0.366<br/>entropy = 0.201<br/>samples = 64<br/>value = [2, 62]<br/>class = Fraud>, fillcolor="#3fa0e6"] ;
178 -> 179 ;
180 [label=<V10 ≤ 0.12<br/>entropy = 0.971<br/>samples = 5<br/>value = [2, 3]<br/>class = Fraud>, fillcolor="#bddef6"] ;
179 -> 180 ;
181 [label=<entropy = 0.0<br/>samples = 3<br/>value = [0, 3]<br/>class = Fraud>, fillcolor="#399de5"] ;
180 -> 181 ;
182 [label=<entropy = 0.0<br/>samples = 2<br/>value = [2, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
180 -> 182 ;
183 [label=<entropy = 0.0<br/>samples = 59<br/>value = [0, 59]<br/>class = Fraud>, fillcolor="#399de5"] ;
179 -> 183 ;
184 [label=<entropy = 0.0<br/>samples = 2<br/>value = [2, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
178 -> 184 ;
185 [label=<entropy = 0.0<br/>samples = 11<br/>value = [11, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
175 -> 185 ;
186 [label=<V17 ≤ 0.461<br/>entropy = 0.998<br/>samples = 53<br/>value = [28, 25]<br/>class = No Fraud>, fillcolor="#fcf2ea"] ;
156 -> 186 ;
187 [label=<entropy = 0.0<br/>samples = 25<br/>value = [0, 25]<br/>class = Fraud>, fillcolor="#399de5"] ;
186 -> 187 ;
188 [label=<entropy = 0.0<br/>samples = 28<br/>value = [28, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
186 -> 188 ;
189 [label=<V27 ≤ -0.27<br/>entropy = 1.0<br/>samples = 165<br/>value = [82, 83]<br/>class = Fraud>, fillcolor="#fdfeff"] ;
155 -> 189 ;
190 [label=<entropy = 0.0<br/>samples = 33<br/>value = [33, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
189 -> 190 ;
191 [label=<V8 ≤ -0.147<br/>entropy = 0.952<br/>samples = 132<br/>value = [49, 83]<br/>class = Fraud>, fillcolor="#aed7f4"] ;
189 -> 191 ;
192 [label=<V28 ≤ 0.009<br/>entropy = 0.844<br/>samples = 114<br/>value = [31, 83]<br/>class = Fraud>, fillcolor="#83c2ef"] ;
191 -> 192 ;
193 [label=<V26 ≤ 0.812<br/>entropy = 0.621<br/>samples = 97<br/>value = [15, 82]<br/>class = Fraud>, fillcolor="#5dafea"] ;
192 -> 193 ;
194 [label=<V18 ≤ 0.84<br/>entropy = 0.433<br/>samples = 90<br/>value = [8, 82]<br/>class = Fraud>, fillcolor="#4ca7e8"] ;
193 -> 194 ;
195 [label=<V25 ≤ 0.143<br/>entropy = 0.174<br/>samples = 77<br/>value = [2, 75]<br/>class = Fraud>, fillcolor="#3ea0e6"] ;
194 -> 195 ;
196 [label=<entropy = 0.0<br/>samples = 2<br/>value = [2, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
195 -> 196 ;
197 [label=<entropy = 0.0<br/>samples = 75<br/>value = [0, 75]<br/>class = Fraud>, fillcolor="#399de5"] ;
195 -> 197 ;
198 [label=<V2 ≤ -0.341<br/>entropy = 0.996<br/>samples = 13<br/>value = [6, 7]<br/>class = Fraud>, fillcolor="#e3f1fb"] ;
194 -> 198 ;
199 [label=<entropy = 0.0<br/>samples = 7<br/>value = [0, 7]<br/>class = Fraud>, fillcolor="#399de5"] ;
198 -> 199 ;
200 [label=<entropy = 0.0<br/>samples = 6<br/>value = [6, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
198 -> 200 ;
201 [label=<entropy = 0.0<br/>samples = 7<br/>value = [7, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
193 -> 201 ;
202 [label=<V20 ≤ 0.051<br/>entropy = 0.323<br/>samples = 17<br/>value = [16, 1]<br/>class = No Fraud>, fillcolor="#e78945"] ;
192 -> 202 ;
203 [label=<entropy = 0.0<br/>samples = 16<br/>value = [16, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
202 -> 203 ;
204 [label=<entropy = 0.0<br/>samples = 1<br/>value = [0, 1]<br/>class = Fraud>, fillcolor="#399de5"] ;
202 -> 204 ;
205 [label=<entropy = 0.0<br/>samples = 18<br/>value = [18, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
191 -> 205 ;
206 [label=<V8 ≤ -1.14<br/>entropy = 0.294<br/>samples = 58<br/>value = [55, 3]<br/>class = No Fraud>, fillcolor="#e68844"] ;
142 -> 206 ;
207 [label=<entropy = 0.0<br/>samples = 3<br/>value = [0, 3]<br/>class = Fraud>, fillcolor="#399de5"] ;
206 -> 207 ;
208 [label=<entropy = 0.0<br/>samples = 55<br/>value = [55, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
206 -> 208 ;
209 [label=<V23 ≤ 0.756<br/>entropy = 0.879<br/>samples = 359<br/>value = [252, 107]<br/>class = No Fraud>, fillcolor="#f0b68d"] ;
141 -> 209 ;
210 [label=<V2 ≤ -0.253<br/>entropy = 0.782<br/>samples = 323<br/>value = [248, 75]<br/>class = No Fraud>, fillcolor="#eda775"] ;
209 -> 210 ;
211 [label=<V5 ≤ -0.001<br/>entropy = 0.955<br/>samples = 197<br/>value = [123, 74]<br/>class = No Fraud>, fillcolor="#f5cdb0"] ;
210 -> 211 ;
212 [label=<entropy = 0.0<br/>samples = 43<br/>value = [43, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
211 -> 212 ;
213 [label=<V4 ≤ -0.431<br/>entropy = 0.999<br/>samples = 154<br/>value = [80, 74]<br/>class = No Fraud>, fillcolor="#fdf6f0"] ;
211 -> 213 ;
214 [label=<V15 ≤ -0.166<br/>entropy = 0.805<br/>samples = 69<br/>value = [17, 52]<br/>class = Fraud>, fillcolor="#7abdee"] ;
213 -> 214 ;
215 [label=<V11 ≤ -1.072<br/>entropy = 0.485<br/>samples = 57<br/>value = [6, 51]<br/>class = Fraud>, fillcolor="#50a9e8"] ;
214 -> 215 ;
216 [label=<V9 ≤ 0.796<br/>entropy = 0.985<br/>samples = 7<br/>value = [4, 3]<br/>class = No Fraud>, fillcolor="#f8e0ce"] ;
215 -> 216 ;
217 [label=<entropy = 0.0<br/>samples = 3<br/>value = [0, 3]<br/>class = Fraud>, fillcolor="#399de5"] ;
216 -> 217 ;
218 [label=<entropy = 0.0<br/>samples = 4<br/>value = [4, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
216 -> 218 ;
219 [label=<V22 ≤ -0.435<br/>entropy = 0.242<br/>samples = 50<br/>value = [2, 48]<br/>class = Fraud>, fillcolor="#41a1e6"] ;
215 -> 219 ;
220 [label=<V2 ≤ -0.56<br/>entropy = 0.764<br/>samples = 9<br/>value = [2, 7]<br/>class = Fraud>, fillcolor="#72b9ec"] ;
219 -> 220 ;
221 [label=<entropy = 0.0<br/>samples = 2<br/>value = [2, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
220 -> 221 ;
222 [label=<entropy = 0.0<br/>samples = 7<br/>value = [0, 7]<br/>class = Fraud>, fillcolor="#399de5"] ;
220 -> 222 ;
223 [label=<entropy = 0.0<br/>samples = 41<br/>value = [0, 41]<br/>class = Fraud>, fillcolor="#399de5"] ;
219 -> 223 ;
224 [label=<V13 ≤ 0.281<br/>entropy = 0.414<br/>samples = 12<br/>value = [11, 1]<br/>class = No Fraud>, fillcolor="#e78c4b"] ;
214 -> 224 ;
225 [label=<entropy = 0.0<br/>samples = 1<br/>value = [0, 1]<br/>class = Fraud>, fillcolor="#399de5"] ;
224 -> 225 ;
226 [label=<entropy = 0.0<br/>samples = 11<br/>value = [11, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
224 -> 226 ;
227 [label=<V14 ≤ 0.09<br/>entropy = 0.825<br/>samples = 85<br/>value = [63, 22]<br/>class = No Fraud>, fillcolor="#eead7e"] ;
213 -> 227 ;
228 [label=<V25 ≤ 0.357<br/>entropy = 0.951<br/>samples = 27<br/>value = [10, 17]<br/>class = Fraud>, fillcolor="#add7f4"] ;
227 -> 228 ;
229 [label=<V26 ≤ -0.793<br/>entropy = 0.485<br/>samples = 19<br/>value = [2, 17]<br/>class = Fraud>, fillcolor="#50a9e8"] ;
228 -> 229 ;
230 [label=<V28 ≤ 0.03<br/>entropy = 0.918<br/>samples = 3<br/>value = [2, 1]<br/>class = No Fraud>, fillcolor="#f2c09c"] ;
229 -> 230 ;
231 [label=<entropy = 0.0<br/>samples = 2<br/>value = [2, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
230 -> 231 ;
232 [label=<entropy = 0.0<br/>samples = 1<br/>value = [0, 1]<br/>class = Fraud>, fillcolor="#399de5"] ;
230 -> 232 ;
233 [label=<entropy = 0.0<br/>samples = 16<br/>value = [0, 16]<br/>class = Fraud>, fillcolor="#399de5"] ;
229 -> 233 ;
234 [label=<entropy = 0.0<br/>samples = 8<br/>value = [8, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
228 -> 234 ;
235 [label=<V20 ≤ -0.25<br/>entropy = 0.424<br/>samples = 58<br/>value = [53, 5]<br/>class = No Fraud>, fillcolor="#e78d4c"] ;
227 -> 235 ;
236 [label=<V8 ≤ -0.17<br/>entropy = 1.0<br/>samples = 10<br/>value = [5, 5]<br/>class = No Fraud>, fillcolor="#ffffff"] ;
235 -> 236 ;
237 [label=<entropy = 0.0<br/>samples = 5<br/>value = [0, 5]<br/>class = Fraud>, fillcolor="#399de5"] ;
236 -> 237 ;
238 [label=<entropy = 0.0<br/>samples = 5<br/>value = [5, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
236 -> 238 ;
239 [label=<entropy = 0.0<br/>samples = 48<br/>value = [48, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
235 -> 239 ;
240 [label=<V14 ≤ -0.049<br/>entropy = 0.067<br/>samples = 126<br/>value = [125, 1]<br/>class = No Fraud>, fillcolor="#e5823b"] ;
210 -> 240 ;
241 [label=<entropy = 0.0<br/>samples = 1<br/>value = [0, 1]<br/>class = Fraud>, fillcolor="#399de5"] ;
240 -> 241 ;
242 [label=<entropy = 0.0<br/>samples = 125<br/>value = [125, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
240 -> 242 ;
243 [label=<V5 ≤ 0.266<br/>entropy = 0.503<br/>samples = 36<br/>value = [4, 32]<br/>class = Fraud>, fillcolor="#52a9e8"] ;
209 -> 243 ;
244 [label=<entropy = 0.0<br/>samples = 4<br/>value = [4, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
243 -> 244 ;
245 [label=<entropy = 0.0<br/>samples = 32<br/>value = [0, 32]<br/>class = Fraud>, fillcolor="#399de5"] ;
243 -> 245 ;
246 [label=<V27 ≤ 0.493<br/>entropy = 0.471<br/>samples = 159<br/>value = [143, 16]<br/>class = No Fraud>, fillcolor="#e88f4f"] ;
140 -> 246 ;
247 [label=<V1 ≤ 0.522<br/>entropy = 0.251<br/>samples = 143<br/>value = [137, 6]<br/>class = No Fraud>, fillcolor="#e68742"] ;
246 -> 247 ;
248 [label=<entropy = 0.0<br/>samples = 117<br/>value = [117, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
247 -> 248 ;
249 [label=<V6 ≤ 0.43<br/>entropy = 0.779<br/>samples = 26<br/>value = [20, 6]<br/>class = No Fraud>, fillcolor="#eda774"] ;
247 -> 249 ;
250 [label=<V8 ≤ -0.532<br/>entropy = 0.592<br/>samples = 7<br/>value = [1, 6]<br/>class = Fraud>, fillcolor="#5aade9"] ;
249 -> 250 ;
251 [label=<entropy = 0.0<br/>samples = 1<br/>value = [1, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
250 -> 251 ;
252 [label=<entropy = 0.0<br/>samples = 6<br/>value = [0, 6]<br/>class = Fraud>, fillcolor="#399de5"] ;
250 -> 252 ;
253 [label=<entropy = 0.0<br/>samples = 19<br/>value = [19, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
249 -> 253 ;
254 [label=<V14 ≤ 0.336<br/>entropy = 0.954<br/>samples = 16<br/>value = [6, 10]<br/>class = Fraud>, fillcolor="#b0d8f5"] ;
246 -> 254 ;
255 [label=<V12 ≤ 0.128<br/>entropy = 0.592<br/>samples = 7<br/>value = [6, 1]<br/>class = No Fraud>, fillcolor="#e9965a"] ;
254 -> 255 ;
256 [label=<entropy = 0.0<br/>samples = 1<br/>value = [0, 1]<br/>class = Fraud>, fillcolor="#399de5"] ;
255 -> 256 ;
257 [label=<entropy = 0.0<br/>samples = 6<br/>value = [6, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
255 -> 257 ;
258 [label=<entropy = 0.0<br/>samples = 9<br/>value = [0, 9]<br/>class = Fraud>, fillcolor="#399de5"] ;
254 -> 258 ;
259 [label=<V11 ≤ -0.31<br/>entropy = 0.132<br/>samples = 273<br/>value = [268, 5]<br/>class = No Fraud>, fillcolor="#e5833d"] ;
139 -> 259 ;
260 [label=<entropy = 0.0<br/>samples = 237<br/>value = [237, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
259 -> 260 ;
261 [label=<V21 ≤ -0.064<br/>entropy = 0.581<br/>samples = 36<br/>value = [31, 5]<br/>class = No Fraud>, fillcolor="#e99559"] ;
259 -> 261 ;
262 [label=<entropy = 0.0<br/>samples = 26<br/>value = [26, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
261 -> 262 ;
263 [label=<V27 ≤ -0.153<br/>entropy = 1.0<br/>samples = 10<br/>value = [5, 5]<br/>class = No Fraud>, fillcolor="#ffffff"] ;
261 -> 263 ;
264 [label=<entropy = 0.0<br/>samples = 5<br/>value = [0, 5]<br/>class = Fraud>, fillcolor="#399de5"] ;
263 -> 264 ;
265 [label=<entropy = 0.0<br/>samples = 5<br/>value = [5, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
263 -> 265 ;
266 [label=<V25 ≤ 1.163<br/>entropy = 0.277<br/>samples = 439<br/>value = [418, 21]<br/>class = No Fraud>, fillcolor="#e68743"] ;
138 -> 266 ;
267 [label=<V19 ≤ 0.265<br/>entropy = 0.148<br/>samples = 425<br/>value = [416, 9]<br/>class = No Fraud>, fillcolor="#e6843d"] ;
266 -> 267 ;
268 [label=<V20 ≤ -0.148<br/>entropy = 0.062<br/>samples = 412<br/>value = [409, 3]<br/>class = No Fraud>, fillcolor="#e5823a"] ;
267 -> 268 ;
269 [label=<V4 ≤ -0.075<br/>entropy = 0.025<br/>samples = 402<br/>value = [401, 1]<br/>class = No Fraud>, fillcolor="#e58139"] ;
268 -> 269 ;
270 [label=<entropy = 0.0<br/>samples = 399<br/>value = [399, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
269 -> 270 ;
271 [label=<V15 ≤ -1.263<br/>entropy = 0.918<br/>samples = 3<br/>value = [2, 1]<br/>class = No Fraud>, fillcolor="#f2c09c"] ;
269 -> 271 ;
272 [label=<entropy = 0.0<br/>samples = 1<br/>value = [0, 1]<br/>class = Fraud>, fillcolor="#399de5"] ;
271 -> 272 ;
273 [label=<entropy = 0.0<br/>samples = 2<br/>value = [2, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
271 -> 273 ;
274 [label=<V17 ≤ 0.827<br/>entropy = 0.722<br/>samples = 10<br/>value = [8, 2]<br/>class = No Fraud>, fillcolor="#eca06a"] ;
268 -> 274 ;
275 [label=<entropy = 0.0<br/>samples = 8<br/>value = [8, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
274 -> 275 ;
276 [label=<entropy = 0.0<br/>samples = 2<br/>value = [0, 2]<br/>class = Fraud>, fillcolor="#399de5"] ;
274 -> 276 ;
277 [label=<V18 ≤ 1.157<br/>entropy = 0.996<br/>samples = 13<br/>value = [7, 6]<br/>class = No Fraud>, fillcolor="#fbede3"] ;
267 -> 277 ;
278 [label=<entropy = 0.0<br/>samples = 7<br/>value = [7, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
277 -> 278 ;
279 [label=<entropy = 0.0<br/>samples = 6<br/>value = [0, 6]<br/>class = Fraud>, fillcolor="#399de5"] ;
277 -> 279 ;
280 [label=<V3 ≤ -0.447<br/>entropy = 0.592<br/>samples = 14<br/>value = [2, 12]<br/>class = Fraud>, fillcolor="#5aade9"] ;
266 -> 280 ;
281 [label=<entropy = 0.0<br/>samples = 2<br/>value = [2, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
280 -> 281 ;
282 [label=<entropy = 0.0<br/>samples = 12<br/>value = [0, 12]<br/>class = Fraud>, fillcolor="#399de5"] ;
280 -> 282 ;
283 [label=<V7 ≤ 1.658<br/>entropy = 0.147<br/>samples = 13651<br/>value = [13365, 286]<br/>class = No Fraud>, fillcolor="#e6843d"] ;
93 -> 283 ;
284 [label=<V21 ≤ 0.109<br/>entropy = 0.119<br/>samples = 13450<br/>value = [13233, 217]<br/>class = No Fraud>, fillcolor="#e5833c"] ;
283 -> 284 ;
285 [label=<V14 ≤ 0.712<br/>entropy = 0.088<br/>samples = 12760<br/>value = [12619, 141]<br/>class = No Fraud>, fillcolor="#e5823b"] ;
284 -> 285 ;
286 [label=<V18 ≤ 0.133<br/>entropy = 0.182<br/>samples = 4541<br/>value = [4416, 125]<br/>class = No Fraud>, fillcolor="#e6853f"] ;
285 -> 286 ;
287 [label=<entropy = 0.0<br/>samples = 1619<br/>value = [1619, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
286 -> 287 ;
288 [label=<V8 ≤ -0.177<br/>entropy = 0.255<br/>samples = 2922<br/>value = [2797, 125]<br/>class = No Fraud>, fillcolor="#e68742"] ;
286 -> 288 ;
289 [label=<V4 ≤ -0.935<br/>entropy = 0.499<br/>samples = 911<br/>value = [811, 100]<br/>class = No Fraud>, fillcolor="#e89151"] ;
288 -> 289 ;
290 [label=<entropy = 0.0<br/>samples = 398<br/>value = [398, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
289 -> 290 ;
291 [label=<V1 ≤ 1.072<br/>entropy = 0.712<br/>samples = 513<br/>value = [413, 100]<br/>class = No Fraud>, fillcolor="#eba069"] ;
289 -> 291 ;
292 [label=<V24 ≤ -0.345<br/>entropy = 0.846<br/>samples = 366<br/>value = [266, 100]<br/>class = No Fraud>, fillcolor="#efb083"] ;
291 -> 292 ;
293 [label=<V3 ≤ 0.848<br/>entropy = 0.995<br/>samples = 138<br/>value = [63, 75]<br/>class = Fraud>, fillcolor="#dfeffb"] ;
292 -> 293 ;
294 [label=<V3 ≤ 0.182<br/>entropy = 0.777<br/>samples = 96<br/>value = [22, 74]<br/>class = Fraud>, fillcolor="#74baed"] ;
293 -> 294 ;
295 [label=<entropy = 0.0<br/>samples = 9<br/>value = [9, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
294 -> 295 ;
296 [label=<V9 ≤ 0.577<br/>entropy = 0.608<br/>samples = 87<br/>value = [13, 74]<br/>class = Fraud>, fillcolor="#5caeea"] ;
294 -> 296 ;
297 [label=<V21 ≤ -0.111<br/>entropy = 0.31<br/>samples = 72<br/>value = [4, 68]<br/>class = Fraud>, fillcolor="#45a3e7"] ;
296 -> 297 ;
298 [label=<entropy = 0.0<br/>samples = 64<br/>value = [0, 64]<br/>class = Fraud>, fillcolor="#399de5"] ;
297 -> 298 ;
299 [label=<V20 ≤ 0.308<br/>entropy = 1.0<br/>samples = 8<br/>value = [4, 4]<br/>class = No Fraud>, fillcolor="#ffffff"] ;
297 -> 299 ;
300 [label=<entropy = 0.0<br/>samples = 4<br/>value = [4, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
299 -> 300 ;
301 [label=<entropy = 0.0<br/>samples = 4<br/>value = [0, 4]<br/>class = Fraud>, fillcolor="#399de5"] ;
299 -> 301 ;
302 [label=<V25 ≤ 0.558<br/>entropy = 0.971<br/>samples = 15<br/>value = [9, 6]<br/>class = No Fraud>, fillcolor="#f6d5bd"] ;
296 -> 302 ;
303 [label=<entropy = 0.0<br/>samples = 8<br/>value = [8, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
302 -> 303 ;
304 [label=<V1 ≤ 0.921<br/>entropy = 0.592<br/>samples = 7<br/>value = [1, 6]<br/>class = Fraud>, fillcolor="#5aade9"] ;
302 -> 304 ;
305 [label=<entropy = 0.0<br/>samples = 6<br/>value = [0, 6]<br/>class = Fraud>, fillcolor="#399de5"] ;
304 -> 305 ;
306 [label=<entropy = 0.0<br/>samples = 1<br/>value = [1, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
304 -> 306 ;
307 [label=<V15 ≤ 2.749<br/>entropy = 0.162<br/>samples = 42<br/>value = [41, 1]<br/>class = No Fraud>, fillcolor="#e6843e"] ;
293 -> 307 ;
308 [label=<entropy = 0.0<br/>samples = 41<br/>value = [41, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
307 -> 308 ;
309 [label=<entropy = 0.0<br/>samples = 1<br/>value = [0, 1]<br/>class = Fraud>, fillcolor="#399de5"] ;
307 -> 309 ;
310 [label=<V21 ≤ -0.109<br/>entropy = 0.499<br/>samples = 228<br/>value = [203, 25]<br/>class = No Fraud>, fillcolor="#e89151"] ;
292 -> 310 ;
311 [label=<V27 ≤ 0.301<br/>entropy = 0.103<br/>samples = 148<br/>value = [146, 2]<br/>class = No Fraud>, fillcolor="#e5833c"] ;
310 -> 311 ;
312 [label=<entropy = 0.0<br/>samples = 146<br/>value = [146, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
311 -> 312 ;
313 [label=<entropy = 0.0<br/>samples = 2<br/>value = [0, 2]<br/>class = Fraud>, fillcolor="#399de5"] ;
311 -> 313 ;
314 [label=<V20 ≤ -0.176<br/>entropy = 0.865<br/>samples = 80<br/>value = [57, 23]<br/>class = No Fraud>, fillcolor="#efb489"] ;
310 -> 314 ;
315 [label=<V1 ≤ 0.078<br/>entropy = 0.977<br/>samples = 34<br/>value = [14, 20]<br/>class = Fraud>, fillcolor="#c4e2f7"] ;
314 -> 315 ;
316 [label=<V4 ≤ -0.394<br/>entropy = 0.592<br/>samples = 14<br/>value = [12, 2]<br/>class = No Fraud>, fillcolor="#e9965a"] ;
315 -> 316 ;
317 [label=<entropy = 0.0<br/>samples = 12<br/>value = [12, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
316 -> 317 ;
318 [label=<entropy = 0.0<br/>samples = 2<br/>value = [0, 2]<br/>class = Fraud>, fillcolor="#399de5"] ;
316 -> 318 ;
319 [label=<V14 ≤ 0.655<br/>entropy = 0.469<br/>samples = 20<br/>value = [2, 18]<br/>class = Fraud>, fillcolor="#4fa8e8"] ;
315 -> 319 ;
320 [label=<entropy = 0.0<br/>samples = 18<br/>value = [0, 18]<br/>class = Fraud>, fillcolor="#399de5"] ;
319 -> 320 ;
321 [label=<entropy = 0.0<br/>samples = 2<br/>value = [2, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
319 -> 321 ;
322 [label=<V9 ≤ 0.108<br/>entropy = 0.348<br/>samples = 46<br/>value = [43, 3]<br/>class = No Fraud>, fillcolor="#e78a47"] ;
314 -> 322 ;
323 [label=<V11 ≤ -0.559<br/>entropy = 0.971<br/>samples = 5<br/>value = [2, 3]<br/>class = Fraud>, fillcolor="#bddef6"] ;
322 -> 323 ;
324 [label=<entropy = 0.0<br/>samples = 2<br/>value = [2, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
323 -> 324 ;
325 [label=<entropy = 0.0<br/>samples = 3<br/>value = [0, 3]<br/>class = Fraud>, fillcolor="#399de5"] ;
323 -> 325 ;
326 [label=<entropy = 0.0<br/>samples = 41<br/>value = [41, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
322 -> 326 ;
327 [label=<entropy = 0.0<br/>samples = 147<br/>value = [147, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
291 -> 327 ;
328 [label=<V4 ≤ -0.416<br/>entropy = 0.097<br/>samples = 2011<br/>value = [1986, 25]<br/>class = No Fraud>, fillcolor="#e5833b"] ;
288 -> 328 ;
329 [label=<V10 ≤ 0.18<br/>entropy = 0.041<br/>samples = 1601<br/>value = [1594, 7]<br/>class = No Fraud>, fillcolor="#e5823a"] ;
328 -> 329 ;
330 [label=<V8 ≤ -0.162<br/>entropy = 0.361<br/>samples = 102<br/>value = [95, 7]<br/>class = No Fraud>, fillcolor="#e78a48"] ;
329 -> 330 ;
331 [label=<V4 ≤ -1.348<br/>entropy = 0.949<br/>samples = 19<br/>value = [12, 7]<br/>class = No Fraud>, fillcolor="#f4caac"] ;
330 -> 331 ;
332 [label=<entropy = 0.0<br/>samples = 11<br/>value = [11, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
331 -> 332 ;
333 [label=<V19 ≤ 0.308<br/>entropy = 0.544<br/>samples = 8<br/>value = [1, 7]<br/>class = Fraud>, fillcolor="#55abe9"] ;
331 -> 333 ;
334 [label=<entropy = 0.0<br/>samples = 7<br/>value = [0, 7]<br/>class = Fraud>, fillcolor="#399de5"] ;
333 -> 334 ;
335 [label=<entropy = 0.0<br/>samples = 1<br/>value = [1, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
333 -> 335 ;
336 [label=<entropy = 0.0<br/>samples = 83<br/>value = [83, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
330 -> 336 ;
337 [label=<entropy = 0.0<br/>samples = 1499<br/>value = [1499, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
329 -> 337 ;
338 [label=<V7 ≤ 0.556<br/>entropy = 0.26<br/>samples = 410<br/>value = [392, 18]<br/>class = No Fraud>, fillcolor="#e68742"] ;
328 -> 338 ;
339 [label=<V15 ≤ -1.087<br/>entropy = 0.125<br/>samples = 351<br/>value = [345, 6]<br/>class = No Fraud>, fillcolor="#e5833c"] ;
338 -> 339 ;
340 [label=<V1 ≤ 0.978<br/>entropy = 0.61<br/>samples = 40<br/>value = [34, 6]<br/>class = No Fraud>, fillcolor="#ea975c"] ;
339 -> 340 ;
341 [label=<entropy = 0.0<br/>samples = 25<br/>value = [25, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
340 -> 341 ;
342 [label=<V17 ≤ 0.376<br/>entropy = 0.971<br/>samples = 15<br/>value = [9, 6]<br/>class = No Fraud>, fillcolor="#f6d5bd"] ;
340 -> 342 ;
343 [label=<entropy = 0.0<br/>samples = 9<br/>value = [9, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
342 -> 343 ;
344 [label=<entropy = 0.0<br/>samples = 6<br/>value = [0, 6]<br/>class = Fraud>, fillcolor="#399de5"] ;
342 -> 344 ;
345 [label=<entropy = 0.0<br/>samples = 311<br/>value = [311, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
339 -> 345 ;
346 [label=<V23 ≤ 0.078<br/>entropy = 0.729<br/>samples = 59<br/>value = [47, 12]<br/>class = No Fraud>, fillcolor="#eca16c"] ;
338 -> 346 ;
347 [label=<V12 ≤ 0.16<br/>entropy = 0.262<br/>samples = 45<br/>value = [43, 2]<br/>class = No Fraud>, fillcolor="#e68742"] ;
346 -> 347 ;
348 [label=<entropy = 0.0<br/>samples = 2<br/>value = [0, 2]<br/>class = Fraud>, fillcolor="#399de5"] ;
347 -> 348 ;
349 [label=<entropy = 0.0<br/>samples = 43<br/>value = [43, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
347 -> 349 ;
350 [label=<V13 ≤ 0.141<br/>entropy = 0.863<br/>samples = 14<br/>value = [4, 10]<br/>class = Fraud>, fillcolor="#88c4ef"] ;
346 -> 350 ;
351 [label=<entropy = 0.0<br/>samples = 10<br/>value = [0, 10]<br/>class = Fraud>, fillcolor="#399de5"] ;
350 -> 351 ;
352 [label=<entropy = 0.0<br/>samples = 4<br/>value = [4, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
350 -> 352 ;
353 [label=<V23 ≤ 0.67<br/>entropy = 0.02<br/>samples = 8219<br/>value = [8203, 16]<br/>class = No Fraud>, fillcolor="#e58139"] ;
285 -> 353 ;
354 [label=<V8 ≤ -0.255<br/>entropy = 0.006<br/>samples = 8028<br/>value = [8024, 4]<br/>class = No Fraud>, fillcolor="#e58139"] ;
353 -> 354 ;
355 [label=<V24 ≤ -1.922<br/>entropy = 0.048<br/>samples = 741<br/>value = [737, 4]<br/>class = No Fraud>, fillcolor="#e5823a"] ;
354 -> 355 ;
356 [label=<V17 ≤ 0.86<br/>entropy = 0.575<br/>samples = 22<br/>value = [19, 3]<br/>class = No Fraud>, fillcolor="#e99558"] ;
355 -> 356 ;
357 [label=<entropy = 0.0<br/>samples = 18<br/>value = [18, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
356 -> 357 ;
358 [label=<V2 ≤ -3.855<br/>entropy = 0.811<br/>samples = 4<br/>value = [1, 3]<br/>class = Fraud>, fillcolor="#7bbeee"] ;
356 -> 358 ;
359 [label=<entropy = 0.0<br/>samples = 1<br/>value = [1, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
358 -> 359 ;
360 [label=<entropy = 0.0<br/>samples = 3<br/>value = [0, 3]<br/>class = Fraud>, fillcolor="#399de5"] ;
358 -> 360 ;
361 [label=<V8 ≤ -0.255<br/>entropy = 0.015<br/>samples = 719<br/>value = [718, 1]<br/>class = No Fraud>, fillcolor="#e58139"] ;
355 -> 361 ;
362 [label=<entropy = 0.0<br/>samples = 718<br/>value = [718, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
361 -> 362 ;
363 [label=<entropy = 0.0<br/>samples = 1<br/>value = [0, 1]<br/>class = Fraud>, fillcolor="#399de5"] ;
361 -> 363 ;
364 [label=<entropy = 0.0<br/>samples = 7287<br/>value = [7287, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
354 -> 364 ;
365 [label=<V13 ≤ -1.06<br/>entropy = 0.339<br/>samples = 191<br/>value = [179, 12]<br/>class = No Fraud>, fillcolor="#e78946"] ;
353 -> 365 ;
366 [label=<V11 ≤ -0.113<br/>entropy = 0.9<br/>samples = 38<br/>value = [26, 12]<br/>class = No Fraud>, fillcolor="#f1bb94"] ;
365 -> 366 ;
367 [label=<V7 ≤ 1.434<br/>entropy = 0.48<br/>samples = 29<br/>value = [26, 3]<br/>class = No Fraud>, fillcolor="#e89050"] ;
366 -> 367 ;
368 [label=<entropy = 0.0<br/>samples = 24<br/>value = [24, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
367 -> 368 ;
369 [label=<V28 ≤ -0.107<br/>entropy = 0.971<br/>samples = 5<br/>value = [2, 3]<br/>class = Fraud>, fillcolor="#bddef6"] ;
367 -> 369 ;
370 [label=<entropy = 0.0<br/>samples = 3<br/>value = [0, 3]<br/>class = Fraud>, fillcolor="#399de5"] ;
369 -> 370 ;
371 [label=<entropy = 0.0<br/>samples = 2<br/>value = [2, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
369 -> 371 ;
372 [label=<entropy = 0.0<br/>samples = 9<br/>value = [0, 9]<br/>class = Fraud>, fillcolor="#399de5"] ;
366 -> 372 ;
373 [label=<entropy = 0.0<br/>samples = 153<br/>value = [153, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
365 -> 373 ;
374 [label=<V13 ≤ -0.661<br/>entropy = 0.5<br/>samples = 690<br/>value = [614, 76]<br/>class = No Fraud>, fillcolor="#e89152"] ;
284 -> 374 ;
375 [label=<V4 ≤ -0.72<br/>entropy = 0.962<br/>samples = 197<br/>value = [121, 76]<br/>class = No Fraud>, fillcolor="#f5d0b5"] ;
374 -> 375 ;
376 [label=<entropy = 0.0<br/>samples = 83<br/>value = [83, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
375 -> 376 ;
377 [label=<V25 ≤ 0.122<br/>entropy = 0.918<br/>samples = 114<br/>value = [38, 76]<br/>class = Fraud>, fillcolor="#9ccef2"] ;
375 -> 377 ;
378 [label=<V16 ≤ 0.612<br/>entropy = 0.738<br/>samples = 96<br/>value = [20, 76]<br/>class = Fraud>, fillcolor="#6db7ec"] ;
377 -> 378 ;
379 [label=<V5 ≤ 0.706<br/>entropy = 0.575<br/>samples = 88<br/>value = [12, 76]<br/>class = Fraud>, fillcolor="#58ace9"] ;
378 -> 379 ;
380 [label=<V11 ≤ -0.337<br/>entropy = 0.909<br/>samples = 37<br/>value = [12, 25]<br/>class = Fraud>, fillcolor="#98ccf1"] ;
379 -> 380 ;
381 [label=<entropy = 0.0<br/>samples = 9<br/>value = [9, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
380 -> 381 ;
382 [label=<V22 ≤ -0.315<br/>entropy = 0.491<br/>samples = 28<br/>value = [3, 25]<br/>class = Fraud>, fillcolor="#51a9e8"] ;
380 -> 382 ;
383 [label=<entropy = 0.0<br/>samples = 3<br/>value = [3, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
382 -> 383 ;
384 [label=<entropy = 0.0<br/>samples = 25<br/>value = [0, 25]<br/>class = Fraud>, fillcolor="#399de5"] ;
382 -> 384 ;
385 [label=<entropy = 0.0<br/>samples = 51<br/>value = [0, 51]<br/>class = Fraud>, fillcolor="#399de5"] ;
379 -> 385 ;
386 [label=<entropy = 0.0<br/>samples = 8<br/>value = [8, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
378 -> 386 ;
387 [label=<entropy = 0.0<br/>samples = 18<br/>value = [18, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
377 -> 387 ;
388 [label=<entropy = 0.0<br/>samples = 493<br/>value = [493, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
374 -> 388 ;
389 [label=<V15 ≤ -0.045<br/>entropy = 0.928<br/>samples = 201<br/>value = [132, 69]<br/>class = No Fraud>, fillcolor="#f3c3a0"] ;
283 -> 389 ;
390 [label=<entropy = 0.0<br/>samples = 94<br/>value = [94, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
389 -> 390 ;
391 [label=<V6 ≤ 0.906<br/>entropy = 0.939<br/>samples = 107<br/>value = [38, 69]<br/>class = Fraud>, fillcolor="#a6d3f3"] ;
389 -> 391 ;
392 [label=<V16 ≤ 0.381<br/>entropy = 0.481<br/>samples = 77<br/>value = [8, 69]<br/>class = Fraud>, fillcolor="#50a8e8"] ;
391 -> 392 ;
393 [label=<V2 ≤ -0.431<br/>entropy = 0.185<br/>samples = 71<br/>value = [2, 69]<br/>class = Fraud>, fillcolor="#3fa0e6"] ;
392 -> 393 ;
394 [label=<entropy = 0.0<br/>samples = 69<br/>value = [0, 69]<br/>class = Fraud>, fillcolor="#399de5"] ;
393 -> 394 ;
395 [label=<entropy = 0.0<br/>samples = 2<br/>value = [2, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
393 -> 395 ;
396 [label=<entropy = 0.0<br/>samples = 6<br/>value = [6, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
392 -> 396 ;
397 [label=<entropy = 0.0<br/>samples = 30<br/>value = [30, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
391 -> 397 ;
398 [label=<V14 ≤ 0.229<br/>entropy = 0.987<br/>samples = 3331<br/>value = [1885, 1446]<br/>class = No Fraud>, fillcolor="#f9e2d1"] ;
92 -> 398 ;
399 [label=<V3 ≤ 1.276<br/>entropy = 0.623<br/>samples = 721<br/>value = [112, 609]<br/>class = Fraud>, fillcolor="#5dafea"] ;
398 -> 399 ;
400 [label=<V9 ≤ 0.883<br/>entropy = 0.492<br/>samples = 680<br/>value = [73, 607]<br/>class = Fraud>, fillcolor="#51a9e8"] ;
399 -> 400 ;
401 [label=<V1 ≤ 1.44<br/>entropy = 0.386<br/>samples = 649<br/>value = [49, 600]<br/>class = Fraud>, fillcolor="#49a5e7"] ;
400 -> 401 ;
402 [label=<V17 ≤ 1.246<br/>entropy = 0.315<br/>samples = 632<br/>value = [36, 596]<br/>class = Fraud>, fillcolor="#45a3e7"] ;
401 -> 402 ;
403 [label=<V25 ≤ 0.78<br/>entropy = 0.178<br/>samples = 560<br/>value = [15, 545]<br/>class = Fraud>, fillcolor="#3ea0e6"] ;
402 -> 403 ;
404 [label=<V11 ≤ -0.657<br/>entropy = 0.092<br/>samples = 515<br/>value = [6, 509]<br/>class = Fraud>, fillcolor="#3b9ee5"] ;
403 -> 404 ;
405 [label=<V25 ≤ 0.147<br/>entropy = 0.485<br/>samples = 57<br/>value = [6, 51]<br/>class = Fraud>, fillcolor="#50a9e8"] ;
404 -> 405 ;
406 [label=<V18 ≤ 0.73<br/>entropy = 0.918<br/>samples = 18<br/>value = [6, 12]<br/>class = Fraud>, fillcolor="#9ccef2"] ;
405 -> 406 ;
407 [label=<V24 ≤ 0.716<br/>entropy = 0.391<br/>samples = 13<br/>value = [1, 12]<br/>class = Fraud>, fillcolor="#49a5e7"] ;
406 -> 407 ;
408 [label=<entropy = 0.0<br/>samples = 12<br/>value = [0, 12]<br/>class = Fraud>, fillcolor="#399de5"] ;
407 -> 408 ;
409 [label=<entropy = 0.0<br/>samples = 1<br/>value = [1, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
407 -> 409 ;
410 [label=<entropy = 0.0<br/>samples = 5<br/>value = [5, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
406 -> 410 ;
411 [label=<entropy = 0.0<br/>samples = 39<br/>value = [0, 39]<br/>class = Fraud>, fillcolor="#399de5"] ;
405 -> 411 ;
412 [label=<entropy = 0.0<br/>samples = 458<br/>value = [0, 458]<br/>class = Fraud>, fillcolor="#399de5"] ;
404 -> 412 ;
413 [label=<V12 ≤ 0.432<br/>entropy = 0.722<br/>samples = 45<br/>value = [9, 36]<br/>class = Fraud>, fillcolor="#6ab6ec"] ;
403 -> 413 ;
414 [label=<V20 ≤ -0.233<br/>entropy = 0.391<br/>samples = 39<br/>value = [3, 36]<br/>class = Fraud>, fillcolor="#49a5e7"] ;
413 -> 414 ;
415 [label=<entropy = 0.0<br/>samples = 34<br/>value = [0, 34]<br/>class = Fraud>, fillcolor="#399de5"] ;
414 -> 415 ;
416 [label=<V6 ≤ 0.875<br/>entropy = 0.971<br/>samples = 5<br/>value = [3, 2]<br/>class = No Fraud>, fillcolor="#f6d5bd"] ;
414 -> 416 ;
417 [label=<entropy = 0.0<br/>samples = 3<br/>value = [3, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
416 -> 417 ;
418 [label=<entropy = 0.0<br/>samples = 2<br/>value = [0, 2]<br/>class = Fraud>, fillcolor="#399de5"] ;
416 -> 418 ;
419 [label=<entropy = 0.0<br/>samples = 6<br/>value = [6, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
413 -> 419 ;
420 [label=<V2 ≤ -0.02<br/>entropy = 0.871<br/>samples = 72<br/>value = [21, 51]<br/>class = Fraud>, fillcolor="#8bc5f0"] ;
402 -> 420 ;
421 [label=<V4 ≤ 0.169<br/>entropy = 0.434<br/>samples = 56<br/>value = [5, 51]<br/>class = Fraud>, fillcolor="#4ca7e8"] ;
420 -> 421 ;
422 [label=<entropy = 0.0<br/>samples = 4<br/>value = [4, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
421 -> 422 ;
423 [label=<V5 ≤ 1.693<br/>entropy = 0.137<br/>samples = 52<br/>value = [1, 51]<br/>class = Fraud>, fillcolor="#3d9fe6"] ;
421 -> 423 ;
424 [label=<entropy = 0.0<br/>samples = 51<br/>value = [0, 51]<br/>class = Fraud>, fillcolor="#399de5"] ;
423 -> 424 ;
425 [label=<entropy = 0.0<br/>samples = 1<br/>value = [1, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
423 -> 425 ;
426 [label=<entropy = 0.0<br/>samples = 16<br/>value = [16, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
420 -> 426 ;
427 [label=<V23 ≤ -0.092<br/>entropy = 0.787<br/>samples = 17<br/>value = [13, 4]<br/>class = No Fraud>, fillcolor="#eda876"] ;
401 -> 427 ;
428 [label=<entropy = 0.0<br/>samples = 4<br/>value = [0, 4]<br/>class = Fraud>, fillcolor="#399de5"] ;
427 -> 428 ;
429 [label=<entropy = 0.0<br/>samples = 13<br/>value = [13, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
427 -> 429 ;
430 [label=<V7 ≤ -0.042<br/>entropy = 0.771<br/>samples = 31<br/>value = [24, 7]<br/>class = No Fraud>, fillcolor="#eda673"] ;
400 -> 430 ;
431 [label=<entropy = 0.0<br/>samples = 6<br/>value = [0, 6]<br/>class = Fraud>, fillcolor="#399de5"] ;
430 -> 431 ;
432 [label=<V21 ≤ 0.189<br/>entropy = 0.242<br/>samples = 25<br/>value = [24, 1]<br/>class = No Fraud>, fillcolor="#e68641"] ;
430 -> 432 ;
433 [label=<entropy = 0.0<br/>samples = 24<br/>value = [24, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
432 -> 433 ;
434 [label=<entropy = 0.0<br/>samples = 1<br/>value = [0, 1]<br/>class = Fraud>, fillcolor="#399de5"] ;
432 -> 434 ;
435 [label=<V13 ≤ -1.141<br/>entropy = 0.281<br/>samples = 41<br/>value = [39, 2]<br/>class = No Fraud>, fillcolor="#e68743"] ;
399 -> 435 ;
436 [label=<entropy = 0.0<br/>samples = 2<br/>value = [0, 2]<br/>class = Fraud>, fillcolor="#399de5"] ;
435 -> 436 ;
437 [label=<entropy = 0.0<br/>samples = 39<br/>value = [39, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
435 -> 437 ;
438 [label=<V8 ≤ -0.144<br/>entropy = 0.905<br/>samples = 2610<br/>value = [1773, 837]<br/>class = No Fraud>, fillcolor="#f1bc96"] ;
398 -> 438 ;
439 [label=<V12 ≤ 0.362<br/>entropy = 0.999<br/>samples = 1481<br/>value = [765, 716]<br/>class = No Fraud>, fillcolor="#fdf7f2"] ;
438 -> 439 ;
440 [label=<V14 ≤ 0.704<br/>entropy = 0.904<br/>samples = 781<br/>value = [250, 531]<br/>class = Fraud>, fillcolor="#96cbf1"] ;
439 -> 440 ;
441 [label=<V11 ≤ -1.228<br/>entropy = 0.578<br/>samples = 385<br/>value = [53, 332]<br/>class = Fraud>, fillcolor="#59ade9"] ;
440 -> 441 ;
442 [label=<V6 ≤ 0.467<br/>entropy = 0.941<br/>samples = 53<br/>value = [34, 19]<br/>class = No Fraud>, fillcolor="#f4c7a8"] ;
441 -> 442 ;
443 [label=<V6 ≤ 0.077<br/>entropy = 0.877<br/>samples = 27<br/>value = [8, 19]<br/>class = Fraud>, fillcolor="#8cc6f0"] ;
442 -> 443 ;
444 [label=<entropy = 0.0<br/>samples = 7<br/>value = [7, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
443 -> 444 ;
445 [label=<V12 ≤ 0.025<br/>entropy = 0.286<br/>samples = 20<br/>value = [1, 19]<br/>class = Fraud>, fillcolor="#43a2e6"] ;
443 -> 445 ;
446 [label=<entropy = 0.0<br/>samples = 1<br/>value = [1, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
445 -> 446 ;
// Fragment of a Graphviz DOT decision-tree export (appears to be scikit-learn
// export_graphviz output — TODO confirm against file header, which is outside
// this chunk). Two statement kinds appear below:
//   N [label=<...>, fillcolor="..."] ;   -- defines tree node N; the HTML-like
//       label shows the split condition (feature <= threshold, on internal
//       nodes only), entropy, sample count, per-class counts [No Fraud, Fraud],
//       and the majority class; fillcolor encodes class purity
//       (orange #e58139 = No Fraud, blue #399de5 = Fraud).
//   P -> C ;                              -- edge from parent node P to child C.
// Leaf nodes (entropy = 0.0 or no split condition) have pure or final class
// assignments. Statements are emitted in depth-first order by the generator;
// do not reorder or hand-edit — regenerate from the model instead.
447 [label=<entropy = 0.0<br/>samples = 19<br/>value = [0, 19]<br/>class = Fraud>, fillcolor="#399de5"] ;
445 -> 447 ;
448 [label=<entropy = 0.0<br/>samples = 26<br/>value = [26, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
442 -> 448 ;
449 [label=<V3 &le; 1.602<br/>entropy = 0.316<br/>samples = 332<br/>value = [19, 313]<br/>class = Fraud>, fillcolor="#45a3e7"] ;
441 -> 449 ;
450 [label=<V7 &le; 0.552<br/>entropy = 0.228<br/>samples = 325<br/>value = [12, 313]<br/>class = Fraud>, fillcolor="#41a1e6"] ;
449 -> 450 ;
451 [label=<entropy = 0.0<br/>samples = 166<br/>value = [0, 166]<br/>class = Fraud>, fillcolor="#399de5"] ;
450 -> 451 ;
452 [label=<V5 &le; 0.98<br/>entropy = 0.386<br/>samples = 159<br/>value = [12, 147]<br/>class = Fraud>, fillcolor="#49a5e7"] ;
450 -> 452 ;
453 [label=<V6 &le; 1.978<br/>entropy = 0.293<br/>samples = 155<br/>value = [8, 147]<br/>class = Fraud>, fillcolor="#44a2e6"] ;
452 -> 453 ;
454 [label=<Amount &le; 1.463<br/>entropy = 0.209<br/>samples = 152<br/>value = [5, 147]<br/>class = Fraud>, fillcolor="#40a0e6"] ;
453 -> 454 ;
455 [label=<V23 &le; 1.999<br/>entropy = 0.064<br/>samples = 133<br/>value = [1, 132]<br/>class = Fraud>, fillcolor="#3b9ee5"] ;
454 -> 455 ;
456 [label=<entropy = 0.0<br/>samples = 132<br/>value = [0, 132]<br/>class = Fraud>, fillcolor="#399de5"] ;
455 -> 456 ;
457 [label=<entropy = 0.0<br/>samples = 1<br/>value = [1, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
455 -> 457 ;
458 [label=<V15 &le; 0.224<br/>entropy = 0.742<br/>samples = 19<br/>value = [4, 15]<br/>class = Fraud>, fillcolor="#6eb7ec"] ;
454 -> 458 ;
459 [label=<entropy = 0.0<br/>samples = 14<br/>value = [0, 14]<br/>class = Fraud>, fillcolor="#399de5"] ;
458 -> 459 ;
460 [label=<V14 &le; 0.26<br/>entropy = 0.722<br/>samples = 5<br/>value = [4, 1]<br/>class = No Fraud>, fillcolor="#eca06a"] ;
458 -> 460 ;
461 [label=<entropy = 0.0<br/>samples = 1<br/>value = [0, 1]<br/>class = Fraud>, fillcolor="#399de5"] ;
460 -> 461 ;
462 [label=<entropy = 0.0<br/>samples = 4<br/>value = [4, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
460 -> 462 ;
463 [label=<entropy = 0.0<br/>samples = 3<br/>value = [3, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
453 -> 463 ;
464 [label=<entropy = 0.0<br/>samples = 4<br/>value = [4, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
452 -> 464 ;
465 [label=<entropy = 0.0<br/>samples = 7<br/>value = [7, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
449 -> 465 ;
466 [label=<V25 &le; 0.766<br/>entropy = 1.0<br/>samples = 396<br/>value = [197, 199]<br/>class = Fraud>, fillcolor="#fdfeff"] ;
440 -> 466 ;
467 [label=<V1 &le; 1.606<br/>entropy = 0.989<br/>samples = 355<br/>value = [156, 199]<br/>class = Fraud>, fillcolor="#d4eaf9"] ;
466 -> 467 ;
468 [label=<V18 &le; 0.608<br/>entropy = 0.953<br/>samples = 316<br/>value = [118, 198]<br/>class = Fraud>, fillcolor="#afd7f4"] ;
467 -> 468 ;
469 [label=<V13 &le; 1.805<br/>entropy = 0.894<br/>samples = 283<br/>value = [88, 195]<br/>class = Fraud>, fillcolor="#92c9f1"] ;
468 -> 469 ;
470 [label=<V7 &le; 0.563<br/>entropy = 0.796<br/>samples = 245<br/>value = [59, 186]<br/>class = Fraud>, fillcolor="#78bced"] ;
469 -> 470 ;
471 [label=<V5 &le; 0.186<br/>entropy = 0.414<br/>samples = 132<br/>value = [11, 121]<br/>class = Fraud>, fillcolor="#4ba6e7"] ;
470 -> 471 ;
472 [label=<V8 &le; -2.267<br/>entropy = 0.868<br/>samples = 38<br/>value = [11, 27]<br/>class = Fraud>, fillcolor="#8ac5f0"] ;
471 -> 472 ;
473 [label=<entropy = 0.0<br/>samples = 27<br/>value = [0, 27]<br/>class = Fraud>, fillcolor="#399de5"] ;
472 -> 473 ;
474 [label=<entropy = 0.0<br/>samples = 11<br/>value = [11, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
472 -> 474 ;
475 [label=<entropy = 0.0<br/>samples = 94<br/>value = [0, 94]<br/>class = Fraud>, fillcolor="#399de5"] ;
471 -> 475 ;
476 [label=<V2 &le; -0.048<br/>entropy = 0.984<br/>samples = 113<br/>value = [48, 65]<br/>class = Fraud>, fillcolor="#cbe5f8"] ;
470 -> 476 ;
477 [label=<V27 &le; -0.558<br/>entropy = 0.922<br/>samples = 98<br/>value = [33, 65]<br/>class = Fraud>, fillcolor="#9ecff2"] ;
476 -> 477 ;
478 [label=<entropy = 0.0<br/>samples = 10<br/>value = [10, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
477 -> 478 ;
479 [label=<V13 &le; -1.164<br/>entropy = 0.829<br/>samples = 88<br/>value = [23, 65]<br/>class = Fraud>, fillcolor="#7fc0ee"] ;
477 -> 479 ;
480 [label=<V10 &le; 0.271<br/>entropy = 0.75<br/>samples = 14<br/>value = [11, 3]<br/>class = No Fraud>, fillcolor="#eca36f"] ;
479 -> 480 ;
481 [label=<entropy = 0.0<br/>samples = 3<br/>value = [0, 3]<br/>class = Fraud>, fillcolor="#399de5"] ;
480 -> 481 ;
482 [label=<entropy = 0.0<br/>samples = 11<br/>value = [11, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
480 -> 482 ;
483 [label=<V24 &le; -0.179<br/>entropy = 0.639<br/>samples = 74<br/>value = [12, 62]<br/>class = Fraud>, fillcolor="#5fb0ea"] ;
479 -> 483 ;
484 [label=<entropy = 0.0<br/>samples = 5<br/>value = [5, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
483 -> 484 ;
485 [label=<V1 &le; 1.403<br/>entropy = 0.474<br/>samples = 69<br/>value = [7, 62]<br/>class = Fraud>, fillcolor="#4fa8e8"] ;
483 -> 485 ;
486 [label=<V28 &le; 0.089<br/>entropy = 0.33<br/>samples = 66<br/>value = [4, 62]<br/>class = Fraud>, fillcolor="#46a3e7"] ;
485 -> 486 ;
487 [label=<entropy = 0.0<br/>samples = 57<br/>value = [0, 57]<br/>class = Fraud>, fillcolor="#399de5"] ;
486 -> 487 ;
488 [label=<V25 &le; -0.629<br/>entropy = 0.991<br/>samples = 9<br/>value = [4, 5]<br/>class = Fraud>, fillcolor="#d7ebfa"] ;
486 -> 488 ;
489 [label=<V14 &le; 1.558<br/>entropy = 0.65<br/>samples = 6<br/>value = [1, 5]<br/>class = Fraud>, fillcolor="#61b1ea"] ;
488 -> 489 ;
490 [label=<entropy = 0.0<br/>samples = 5<br/>value = [0, 5]<br/>class = Fraud>, fillcolor="#399de5"] ;
489 -> 490 ;
491 [label=<entropy = 0.0<br/>samples = 1<br/>value = [1, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
489 -> 491 ;
492 [label=<entropy = 0.0<br/>samples = 3<br/>value = [3, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
488 -> 492 ;
493 [label=<entropy = 0.0<br/>samples = 3<br/>value = [3, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
485 -> 493 ;
494 [label=<entropy = 0.0<br/>samples = 15<br/>value = [15, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
476 -> 494 ;
495 [label=<V28 &le; -1.54<br/>entropy = 0.79<br/>samples = 38<br/>value = [29, 9]<br/>class = No Fraud>, fillcolor="#eda876"] ;
469 -> 495 ;
496 [label=<V19 &le; 0.814<br/>entropy = 0.469<br/>samples = 10<br/>value = [1, 9]<br/>class = Fraud>, fillcolor="#4fa8e8"] ;
495 -> 496 ;
497 [label=<entropy = 0.0<br/>samples = 9<br/>value = [0, 9]<br/>class = Fraud>, fillcolor="#399de5"] ;
496 -> 497 ;
498 [label=<entropy = 0.0<br/>samples = 1<br/>value = [1, 0]<br/>class = No Fraud>, fillcolor="#e58139"] ;
496 -> 498 ;