|
1
|
|
|
# Copyright 2014-2020 by Christopher C. Little. |
|
2
|
|
|
# This file is part of Abydos. |
|
3
|
|
|
# |
|
4
|
|
|
# Abydos is free software: you can redistribute it and/or modify |
|
5
|
|
|
# it under the terms of the GNU General Public License as published by |
|
6
|
|
|
# the Free Software Foundation, either version 3 of the License, or |
|
7
|
|
|
# (at your option) any later version. |
|
8
|
|
|
# |
|
9
|
|
|
# Abydos is distributed in the hope that it will be useful, |
|
10
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12
|
|
|
# GNU General Public License for more details. |
|
13
|
|
|
# |
|
14
|
|
|
# You should have received a copy of the GNU General Public License |
|
15
|
|
|
# along with Abydos. If not, see <http://www.gnu.org/licenses/>. |
|
16
|
|
|
|
|
17
|
|
|
r"""abydos.stats._confusion_table. |
|
18
|
|
|
|
|
19
|
1 |
|
This includes the ConfusionTable object, which includes members capable of |
|
20
|
|
|
calculating the following data based on a confusion table: |
|
21
|
|
|
|
|
22
|
|
|
- population counts |
|
23
|
|
|
- precision, recall, specificity, negative predictive value, fall-out, |
|
24
|
|
|
false discovery rate, accuracy, balanced accuracy, informedness, |
|
25
|
|
|
and markedness |
|
26
|
|
|
- various means of the precision & recall, including: arithmetic, |
|
27
|
|
|
geometric, harmonic, quadratic, logarithmic, contraharmonic, |
|
28
|
|
|
identric (exponential), & Hölder (power/generalized) means |
|
29
|
|
|
- :math:`F_{\beta}`-scores, :math:`E`-scores, :math:`G`-measures, along |
|
30
|
|
|
with special functions for :math:`F_{1}`, :math:`F_{0.5}`, & |
|
31
|
|
|
:math:`F_{2}` scores |
|
32
|
|
|
- significance & Matthews correlation coefficient calculation |
|
33
|
|
|
""" |
|
34
|
|
|
|
|
35
|
|
|
import math |
|
36
|
|
|
|
|
37
|
1 |
|
from typing import Dict, List, Tuple, Union |
|
38
|
|
|
|
|
39
|
|
|
from ._mean import ( |
|
40
|
|
|
aghmean, |
|
41
|
|
|
agmean, |
|
42
|
|
|
amean, |
|
43
|
|
|
cmean, |
|
44
|
1 |
|
ghmean, |
|
45
|
|
|
gmean, |
|
46
|
1 |
|
heronian_mean, |
|
47
|
|
|
hmean, |
|
48
|
1 |
|
hoelder_mean, |
|
49
|
|
|
imean, |
|
50
|
|
|
lehmer_mean, |
|
51
|
|
|
qmean, |
|
52
|
|
|
seiffert_mean, |
|
53
|
|
|
) |
|
54
|
|
|
|
|
55
|
|
|
__all__ = ['ConfusionTable'] |
|
56
|
|
|
|
|
57
|
|
|
|
|
58
|
|
|
class ConfusionTable: |
|
59
|
|
|
"""ConfusionTable object. |
|
60
|
|
|
|
|
61
|
|
|
This object is initialized by passing either four integers (or a tuple of |
|
62
|
|
|
four integers) representing the squares of a confusion table: |
|
63
|
1 |
|
true positives, true negatives, false positives, and false negatives |
|
64
|
|
|
|
|
65
|
1 |
|
The object possesses methods for the calculation of various statistics |
|
66
|
|
|
based on the confusion table. |
|
67
|
|
|
""" |
|
68
|
1 |
|
|
|
69
|
|
|
_tp, _tn, _fp, _fn = 0.0, 0.0, 0.0, 0.0 |
|
70
|
|
|
|
|
71
|
|
|
def __init__( |
|
72
|
|
|
self, |
|
73
|
|
|
tp: Union[ |
|
74
|
|
|
float, |
|
75
|
|
|
Tuple[float, float, float, float], |
|
76
|
|
|
List[float], |
|
77
|
|
|
Dict[str, float], |
|
78
|
|
|
] = 0.0, |
|
79
|
1 |
|
tn: float = 0.0, |
|
80
|
|
|
fp: float = 0.0, |
|
81
|
1 |
|
fn: float = 0.0, |
|
82
|
|
|
) -> None: |
|
83
|
|
|
"""Initialize ConfusionTable. |
|
84
|
|
|
|
|
85
|
|
|
Parameters |
|
86
|
|
|
---------- |
|
87
|
|
|
tp : float or a tuple, list, or dict |
|
88
|
|
|
True positives; If a tuple or list is supplied, it must include 4 |
|
89
|
|
|
values in the order [tp, tn, fp, fn]. If a dict is supplied, it |
|
90
|
|
|
must have 4 keys, namely 'tp', 'tn', 'fp', & 'fn'. |
|
91
|
|
|
tn : float |
|
92
|
|
|
True negatives |
|
93
|
|
|
fp : float |
|
94
|
|
|
False positives |
|
95
|
|
|
fn : float |
|
96
|
|
|
False negatives |
|
97
|
|
|
|
|
98
|
|
|
Raises |
|
99
|
|
|
------ |
|
100
|
|
|
AttributeError |
|
101
|
|
|
ConfusionTable requires a 4-tuple when being created from a tuple. |
|
102
|
|
|
|
|
103
|
|
|
Examples |
|
104
|
|
|
-------- |
|
105
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
106
|
|
|
>>> ct == ConfusionTable((120, 60, 20, 30)) |
|
107
|
|
|
True |
|
108
|
|
|
>>> ct == ConfusionTable([120, 60, 20, 30]) |
|
109
|
|
|
True |
|
110
|
|
|
>>> ct == ConfusionTable({'tp': 120, 'tn': 60, 'fp': 20, 'fn': 30}) |
|
111
|
|
|
True |
|
112
|
|
|
|
|
113
|
|
|
|
|
114
|
|
|
.. versionadded:: 0.1.0 |
|
115
|
|
|
|
|
116
|
1 |
|
""" |
|
117
|
1 |
|
if isinstance(tp, (tuple, list)): |
|
118
|
1 |
|
if len(tp) == 4: |
|
119
|
1 |
|
self._tp = tp[0] |
|
120
|
1 |
|
self._tn = tp[1] |
|
121
|
1 |
|
self._fp = tp[2] |
|
122
|
|
|
self._fn = tp[3] |
|
123
|
1 |
|
else: |
|
124
|
|
|
raise AttributeError( |
|
125
|
|
|
'ConfusionTable requires a 4-tuple when being created ' |
|
126
|
|
|
+ 'from a tuple.' |
|
127
|
1 |
|
) |
|
128
|
1 |
|
elif isinstance(tp, dict): |
|
129
|
1 |
|
if 'tp' in tp: |
|
130
|
1 |
|
self._tp = tp['tp'] |
|
131
|
1 |
|
if 'tn' in tp: |
|
132
|
1 |
|
self._tn = tp['tn'] |
|
133
|
1 |
|
if 'fp' in tp: |
|
134
|
1 |
|
self._fp = tp['fp'] |
|
135
|
1 |
|
if 'fn' in tp: |
|
136
|
|
|
self._fn = tp['fn'] |
|
137
|
1 |
|
else: |
|
138
|
1 |
|
self._tp = tp |
|
139
|
1 |
|
self._tn = tn |
|
140
|
1 |
|
self._fp = fp |
|
141
|
|
|
self._fn = fn |
|
142
|
1 |
|
|
|
143
|
|
|
def __eq__(self, other: object) -> bool: |
|
144
|
|
|
"""Perform eqality (==) comparison. |
|
145
|
|
|
|
|
146
|
|
|
Compares a ConfusionTable to another ConfusionTable or its equivalent |
|
147
|
|
|
in the form of a tuple, list, or dict. |
|
148
|
|
|
|
|
149
|
|
|
Parameters |
|
150
|
|
|
---------- |
|
151
|
|
|
other : ConfusionTable, tuple, list, or dict |
|
152
|
|
|
Another ConfusionTable object to compare to |
|
153
|
|
|
|
|
154
|
|
|
Returns |
|
155
|
|
|
------- |
|
156
|
|
|
bool |
|
157
|
|
|
True if two ConfusionTables are the same object or all four of |
|
158
|
|
|
their attributes are equal |
|
159
|
|
|
|
|
160
|
|
|
Examples |
|
161
|
|
|
-------- |
|
162
|
|
|
>>> ct1 = ConfusionTable(120, 60, 20, 30) |
|
163
|
|
|
>>> ct2 = ConfusionTable(120, 60, 20, 30) |
|
164
|
|
|
>>> ct3 = ConfusionTable(60, 30, 10, 15) |
|
165
|
|
|
|
|
166
|
|
|
>>> ct1 == ct2 |
|
167
|
|
|
True |
|
168
|
|
|
>>> ct1 == ct3 |
|
169
|
|
|
False |
|
170
|
|
|
|
|
171
|
|
|
>>> ct1 != ct2 |
|
172
|
|
|
False |
|
173
|
|
|
>>> ct1 != ct3 |
|
174
|
|
|
True |
|
175
|
|
|
|
|
176
|
|
|
|
|
177
|
|
|
.. versionadded:: 0.1.0 |
|
178
|
|
|
|
|
179
|
1 |
|
""" |
|
180
|
1 |
|
if isinstance(other, ConfusionTable): |
|
181
|
1 |
|
if id(self) == id(other): |
|
182
|
1 |
|
return True |
|
183
|
|
|
if ( |
|
184
|
|
|
self._tp == other.true_pos() |
|
185
|
|
|
and self._tn == other.true_neg() |
|
186
|
|
|
and self._fp == other.false_pos() |
|
187
|
|
|
and self._fn == other.false_neg() |
|
188
|
1 |
|
): |
|
189
|
1 |
|
return True |
|
190
|
1 |
|
elif isinstance(other, (tuple, list)): |
|
191
|
|
|
if ( |
|
192
|
|
|
self._tp == other[0] |
|
193
|
|
|
and self._tn == other[1] |
|
194
|
|
|
and self._fp == other[2] |
|
195
|
|
|
and self._fn == other[3] |
|
196
|
1 |
|
): |
|
197
|
1 |
|
return True |
|
198
|
1 |
|
elif isinstance(other, dict): |
|
199
|
|
|
if ( |
|
200
|
|
|
self._tp == other['tp'] |
|
201
|
|
|
and self._tn == other['tn'] |
|
202
|
|
|
and self._fp == other['fp'] |
|
203
|
|
|
and self._fn == other['fn'] |
|
204
|
1 |
|
): |
|
205
|
1 |
|
return True |
|
206
|
|
|
return False |
|
207
|
1 |
|
|
|
208
|
|
|
def __str__(self) -> str: |
|
209
|
|
|
"""Cast to str. |
|
210
|
|
|
|
|
211
|
|
|
Returns |
|
212
|
|
|
------- |
|
213
|
|
|
str |
|
214
|
|
|
A human-readable version of the confusion table |
|
215
|
|
|
|
|
216
|
|
|
Example |
|
217
|
|
|
------- |
|
218
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
219
|
|
|
>>> str(ct) |
|
220
|
|
|
'tp:120, tn:60, fp:20, fn:30' |
|
221
|
|
|
|
|
222
|
|
|
|
|
223
|
|
|
.. versionadded:: 0.1.0 |
|
224
|
|
|
|
|
225
|
1 |
|
""" |
|
226
|
|
|
return 'tp:{}, tn:{}, fp:{}, fn:{}'.format( |
|
227
|
|
|
self._tp, self._tn, self._fp, self._fn |
|
228
|
|
|
) |
|
229
|
1 |
|
|
|
230
|
|
|
def __repr__(self) -> str: |
|
231
|
|
|
"""Return representation. |
|
232
|
|
|
|
|
233
|
|
|
Returns |
|
234
|
|
|
------- |
|
235
|
|
|
str |
|
236
|
|
|
A string representation of the ConfusionTable that can be used to |
|
237
|
|
|
recreate it |
|
238
|
|
|
|
|
239
|
|
|
Example |
|
240
|
|
|
------- |
|
241
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
242
|
|
|
>>> repr(ct) |
|
243
|
|
|
'ConfusionTable(tp=120, tn=60, fp=20, fn=30)' |
|
244
|
|
|
|
|
245
|
|
|
|
|
246
|
|
|
.. versionadded:: 0.4.0 |
|
247
|
|
|
|
|
248
|
1 |
|
""" |
|
249
|
|
|
return 'ConfusionTable(tp={}, tn={}, fp={}, fn={})'.format( |
|
250
|
|
|
self._tp, self._tn, self._fp, self._fn |
|
251
|
|
|
) |
|
252
|
1 |
|
|
|
253
|
|
|
def to_tuple(self) -> Tuple[float, float, float, float]: |
|
254
|
|
|
"""Cast to tuple. |
|
255
|
|
|
|
|
256
|
|
|
Returns |
|
257
|
|
|
------- |
|
258
|
|
|
tuple |
|
259
|
|
|
The confusion table as a 4-tuple (tp, tn, fp, fn) |
|
260
|
|
|
|
|
261
|
|
|
Example |
|
262
|
|
|
------- |
|
263
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
264
|
|
|
>>> ct.to_tuple() |
|
265
|
|
|
(120, 60, 20, 30) |
|
266
|
|
|
|
|
267
|
|
|
|
|
268
|
|
|
.. versionadded:: 0.1.0 |
|
269
|
|
|
|
|
270
|
1 |
|
""" |
|
271
|
|
|
return self._tp, self._tn, self._fp, self._fn |
|
272
|
1 |
|
|
|
273
|
|
|
def to_dict(self) -> Dict[str, float]: |
|
274
|
|
|
"""Cast to dict. |
|
275
|
|
|
|
|
276
|
|
|
Returns |
|
277
|
|
|
------- |
|
278
|
|
|
dict |
|
279
|
|
|
The confusion table as a dict |
|
280
|
|
|
|
|
281
|
|
|
Example |
|
282
|
|
|
------- |
|
283
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
284
|
|
|
>>> import pprint |
|
285
|
|
|
>>> pprint.pprint(ct.to_dict()) |
|
286
|
|
|
{'fn': 30, 'fp': 20, 'tn': 60, 'tp': 120} |
|
287
|
|
|
|
|
288
|
|
|
|
|
289
|
|
|
.. versionadded:: 0.1.0 |
|
290
|
|
|
|
|
291
|
1 |
|
""" |
|
292
|
|
|
return {'tp': self._tp, 'tn': self._tn, 'fp': self._fp, 'fn': self._fn} |
|
293
|
1 |
|
|
|
294
|
|
|
def true_pos(self) -> float: |
|
295
|
|
|
"""Return true positives. |
|
296
|
|
|
|
|
297
|
|
|
Returns |
|
298
|
|
|
------- |
|
299
|
|
|
float |
|
300
|
|
|
The true positives of the confusion table |
|
301
|
|
|
|
|
302
|
|
|
Example |
|
303
|
|
|
------- |
|
304
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
305
|
|
|
>>> ct.true_pos() |
|
306
|
|
|
120 |
|
307
|
|
|
|
|
308
|
|
|
|
|
309
|
|
|
.. versionadded:: 0.1.0 |
|
310
|
|
|
|
|
311
|
1 |
|
""" |
|
312
|
|
|
return self._tp |
|
313
|
1 |
|
|
|
314
|
|
|
def true_neg(self) -> float: |
|
315
|
|
|
"""Return true negatives. |
|
316
|
|
|
|
|
317
|
|
|
Returns |
|
318
|
|
|
------- |
|
319
|
|
|
float |
|
320
|
|
|
The true negatives of the confusion table |
|
321
|
|
|
|
|
322
|
|
|
Example |
|
323
|
|
|
------- |
|
324
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
325
|
|
|
>>> ct.true_neg() |
|
326
|
|
|
60 |
|
327
|
|
|
|
|
328
|
|
|
|
|
329
|
|
|
.. versionadded:: 0.1.0 |
|
330
|
|
|
|
|
331
|
1 |
|
""" |
|
332
|
|
|
return self._tn |
|
333
|
1 |
|
|
|
334
|
|
|
def false_pos(self) -> float: |
|
335
|
|
|
"""Return false positives. |
|
336
|
|
|
|
|
337
|
|
|
AKA Type I error |
|
338
|
|
|
|
|
339
|
|
|
Returns |
|
340
|
|
|
------- |
|
341
|
|
|
float |
|
342
|
|
|
The false positives of the confusion table |
|
343
|
|
|
|
|
344
|
|
|
Example |
|
345
|
|
|
------- |
|
346
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
347
|
|
|
>>> ct.false_pos() |
|
348
|
|
|
20 |
|
349
|
|
|
|
|
350
|
|
|
|
|
351
|
|
|
.. versionadded:: 0.1.0 |
|
352
|
|
|
|
|
353
|
1 |
|
""" |
|
354
|
|
|
return self._fp |
|
355
|
1 |
|
|
|
356
|
|
|
def false_neg(self) -> float: |
|
357
|
|
|
"""Return false negatives. |
|
358
|
|
|
|
|
359
|
|
|
AKA Type II error |
|
360
|
|
|
|
|
361
|
|
|
Returns |
|
362
|
|
|
------- |
|
363
|
|
|
float |
|
364
|
|
|
The false negatives of the confusion table |
|
365
|
|
|
|
|
366
|
|
|
Example |
|
367
|
|
|
------- |
|
368
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
369
|
|
|
>>> ct.false_neg() |
|
370
|
|
|
30 |
|
371
|
|
|
|
|
372
|
|
|
|
|
373
|
|
|
.. versionadded:: 0.1.0 |
|
374
|
|
|
|
|
375
|
1 |
|
""" |
|
376
|
|
|
return self._fn |
|
377
|
1 |
|
|
|
378
|
|
|
def correct_pop(self) -> float: |
|
379
|
|
|
"""Return correct population. |
|
380
|
|
|
|
|
381
|
|
|
Returns |
|
382
|
|
|
------- |
|
383
|
|
|
float |
|
384
|
|
|
The correct population of the confusion table |
|
385
|
|
|
|
|
386
|
|
|
Example |
|
387
|
|
|
------- |
|
388
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
389
|
|
|
>>> ct.correct_pop() |
|
390
|
|
|
180 |
|
391
|
|
|
|
|
392
|
|
|
|
|
393
|
|
|
.. versionadded:: 0.1.0 |
|
394
|
|
|
|
|
395
|
1 |
|
""" |
|
396
|
|
|
return self._tp + self._tn |
|
397
|
1 |
|
|
|
398
|
|
|
def error_pop(self) -> float: |
|
399
|
|
|
"""Return error population. |
|
400
|
|
|
|
|
401
|
|
|
Returns |
|
402
|
|
|
------- |
|
403
|
|
|
float |
|
404
|
|
|
The error population of the confusion table |
|
405
|
|
|
|
|
406
|
|
|
Example |
|
407
|
|
|
------- |
|
408
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
409
|
|
|
>>> ct.error_pop() |
|
410
|
|
|
50 |
|
411
|
|
|
|
|
412
|
|
|
|
|
413
|
|
|
.. versionadded:: 0.1.0 |
|
414
|
|
|
|
|
415
|
1 |
|
""" |
|
416
|
|
|
return self._fp + self._fn |
|
417
|
1 |
|
|
|
418
|
|
|
def pred_pos_pop(self) -> float: |
|
419
|
|
|
"""Return predicted positive population. |
|
420
|
|
|
|
|
421
|
|
|
Returns |
|
422
|
|
|
------- |
|
423
|
|
|
float |
|
424
|
|
|
The predicted positive population of the confusion table |
|
425
|
|
|
|
|
426
|
|
|
Example |
|
427
|
|
|
------- |
|
428
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
429
|
|
|
>>> ct.pred_pos_pop() |
|
430
|
|
|
140 |
|
431
|
|
|
|
|
432
|
|
|
|
|
433
|
|
|
.. versionadded:: 0.1.0 |
|
434
|
|
|
.. versionchanged:: 0.4.0 |
|
435
|
|
|
renamed from test_pos_pop |
|
436
|
|
|
|
|
437
|
|
|
|
|
438
|
|
|
.. versionadded:: 0.1.0 |
|
439
|
|
|
|
|
440
|
1 |
|
""" |
|
441
|
|
|
return self._tp + self._fp |
|
442
|
1 |
|
|
|
443
|
|
|
def pred_neg_pop(self) -> float: |
|
444
|
|
|
"""Return predicted negative population. |
|
445
|
|
|
|
|
446
|
|
|
Returns |
|
447
|
|
|
------- |
|
448
|
|
|
float |
|
449
|
|
|
The predicted negative population of the confusion table |
|
450
|
|
|
|
|
451
|
|
|
Example |
|
452
|
|
|
------- |
|
453
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
454
|
|
|
>>> ct.pred_neg_pop() |
|
455
|
|
|
90 |
|
456
|
|
|
|
|
457
|
|
|
|
|
458
|
|
|
.. versionadded:: 0.1.0 |
|
459
|
|
|
.. versionchanged:: 0.4.0 |
|
460
|
|
|
renamed from test_neg_pop |
|
461
|
|
|
|
|
462
|
|
|
|
|
463
|
|
|
.. versionadded:: 0.1.0 |
|
464
|
|
|
|
|
465
|
1 |
|
""" |
|
466
|
|
|
return self._tn + self._fn |
|
467
|
1 |
|
|
|
468
|
|
|
def cond_pos_pop(self) -> float: |
|
469
|
|
|
"""Return condition positive population. |
|
470
|
|
|
|
|
471
|
|
|
Returns |
|
472
|
|
|
------- |
|
473
|
|
|
float |
|
474
|
|
|
The condition positive population of the confusion table |
|
475
|
|
|
|
|
476
|
|
|
Example |
|
477
|
|
|
------- |
|
478
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
479
|
|
|
>>> ct.cond_pos_pop() |
|
480
|
|
|
150 |
|
481
|
|
|
|
|
482
|
|
|
|
|
483
|
|
|
.. versionadded:: 0.1.0 |
|
484
|
|
|
|
|
485
|
1 |
|
""" |
|
486
|
|
|
return self._tp + self._fn |
|
487
|
1 |
|
|
|
488
|
|
|
def cond_neg_pop(self) -> float: |
|
489
|
|
|
"""Return condition negative population. |
|
490
|
|
|
|
|
491
|
|
|
Returns |
|
492
|
|
|
------- |
|
493
|
|
|
float |
|
494
|
|
|
The condition negative population of the confusion table |
|
495
|
|
|
|
|
496
|
|
|
Example |
|
497
|
|
|
------- |
|
498
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
499
|
|
|
>>> ct.cond_neg_pop() |
|
500
|
|
|
80 |
|
501
|
|
|
|
|
502
|
|
|
|
|
503
|
|
|
.. versionadded:: 0.1.0 |
|
504
|
|
|
|
|
505
|
1 |
|
""" |
|
506
|
|
|
return self._fp + self._tn |
|
507
|
1 |
|
|
|
508
|
|
|
def population(self) -> float: |
|
509
|
|
|
"""Return population, N. |
|
510
|
|
|
|
|
511
|
|
|
Returns |
|
512
|
|
|
------- |
|
513
|
|
|
float |
|
514
|
|
|
The population (N) of the confusion table |
|
515
|
|
|
|
|
516
|
|
|
Example |
|
517
|
|
|
------- |
|
518
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
519
|
|
|
>>> ct.population() |
|
520
|
|
|
230 |
|
521
|
|
|
|
|
522
|
|
|
|
|
523
|
|
|
.. versionadded:: 0.1.0 |
|
524
|
|
|
|
|
525
|
1 |
|
""" |
|
526
|
|
|
return self._tp + self._tn + self._fp + self._fn |
|
527
|
1 |
|
|
|
528
|
|
|
def precision(self) -> float: |
|
529
|
|
|
r"""Return precision. |
|
530
|
|
|
|
|
531
|
|
|
Precision is defined as |
|
532
|
|
|
|
|
533
|
|
|
.. math:: |
|
534
|
|
|
|
|
535
|
|
|
\frac{tp}{tp + fp} |
|
536
|
|
|
|
|
537
|
|
|
AKA positive predictive value (PPV) |
|
538
|
|
|
|
|
539
|
|
|
Cf. https://en.wikipedia.org/wiki/Precision_and_recall |
|
540
|
|
|
|
|
541
|
|
|
Cf. https://en.wikipedia.org/wiki/Information_retrieval#Precision |
|
542
|
|
|
|
|
543
|
|
|
Returns |
|
544
|
|
|
------- |
|
545
|
|
|
float |
|
546
|
|
|
The precision of the confusion table |
|
547
|
|
|
|
|
548
|
|
|
Example |
|
549
|
|
|
------- |
|
550
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
551
|
|
|
>>> ct.precision() |
|
552
|
|
|
0.8571428571428571 |
|
553
|
|
|
|
|
554
|
|
|
|
|
555
|
|
|
.. versionadded:: 0.1.0 |
|
556
|
|
|
|
|
557
|
1 |
|
""" |
|
558
|
1 |
|
try: |
|
559
|
1 |
|
return self._tp / (self._tp + self._fp) |
|
560
|
1 |
|
except ZeroDivisionError: |
|
561
|
|
|
return float('nan') |
|
562
|
1 |
|
|
|
563
|
|
|
def precision_gain(self) -> float: |
|
564
|
|
|
r"""Return gain in precision. |
|
565
|
|
|
|
|
566
|
|
|
The gain in precision is defined as |
|
567
|
|
|
|
|
568
|
|
|
.. math:: |
|
569
|
|
|
|
|
570
|
|
|
G(precision) = \frac{precision}{random~ precision} |
|
571
|
|
|
|
|
572
|
|
|
Cf. https://en.wikipedia.org/wiki/Gain_(information_retrieval) |
|
573
|
|
|
|
|
574
|
|
|
Returns |
|
575
|
|
|
------- |
|
576
|
|
|
float |
|
577
|
|
|
The gain in precision of the confusion table |
|
578
|
|
|
|
|
579
|
|
|
Example |
|
580
|
|
|
------- |
|
581
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
582
|
|
|
>>> ct.precision_gain() |
|
583
|
|
|
1.3142857142857143 |
|
584
|
|
|
|
|
585
|
|
|
|
|
586
|
|
|
.. versionadded:: 0.1.0 |
|
587
|
|
|
|
|
588
|
1 |
|
""" |
|
589
|
1 |
|
try: |
|
590
|
1 |
|
random_precision = self.cond_pos_pop() / self.population() |
|
591
|
1 |
|
return self.precision() / random_precision |
|
592
|
1 |
|
except ZeroDivisionError: |
|
593
|
|
|
return float('nan') |
|
594
|
1 |
|
|
|
595
|
|
|
def recall(self) -> float: |
|
596
|
|
|
r"""Return recall. |
|
597
|
|
|
|
|
598
|
|
|
Recall is defined as |
|
599
|
|
|
|
|
600
|
|
|
.. math:: |
|
601
|
|
|
|
|
602
|
|
|
\frac{tp}{tp + fn} |
|
603
|
|
|
|
|
604
|
|
|
AKA sensitivity |
|
605
|
|
|
|
|
606
|
|
|
AKA true positive rate (TPR) |
|
607
|
|
|
|
|
608
|
|
|
Cf. https://en.wikipedia.org/wiki/Precision_and_recall |
|
609
|
|
|
|
|
610
|
|
|
Cf. https://en.wikipedia.org/wiki/Sensitivity_(test) |
|
611
|
|
|
|
|
612
|
|
|
Cf. https://en.wikipedia.org/wiki/Information_retrieval#Recall |
|
613
|
|
|
|
|
614
|
|
|
Returns |
|
615
|
|
|
------- |
|
616
|
|
|
float |
|
617
|
|
|
The recall of the confusion table |
|
618
|
|
|
|
|
619
|
|
|
Example |
|
620
|
|
|
------- |
|
621
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
622
|
|
|
>>> ct.recall() |
|
623
|
|
|
0.8 |
|
624
|
|
|
|
|
625
|
|
|
|
|
626
|
|
|
.. versionadded:: 0.1.0 |
|
627
|
|
|
|
|
628
|
1 |
|
""" |
|
629
|
1 |
|
try: |
|
630
|
1 |
|
return self._tp / (self._tp + self._fn) |
|
631
|
1 |
|
except ZeroDivisionError: |
|
632
|
|
|
return float('nan') |
|
633
|
1 |
|
|
|
634
|
|
|
def specificity(self) -> float: |
|
635
|
|
|
r"""Return specificity. |
|
636
|
|
|
|
|
637
|
|
|
Specificity is defined as |
|
638
|
|
|
|
|
639
|
|
|
.. math:: |
|
640
|
|
|
|
|
641
|
|
|
\frac{tn}{tn + fp} |
|
642
|
|
|
|
|
643
|
|
|
AKA true negative rate (TNR) |
|
644
|
|
|
|
|
645
|
|
|
AKA inverse recall |
|
646
|
|
|
|
|
647
|
|
|
Cf. https://en.wikipedia.org/wiki/Specificity_(tests) |
|
648
|
|
|
|
|
649
|
|
|
Returns |
|
650
|
|
|
------- |
|
651
|
|
|
float |
|
652
|
|
|
The specificity of the confusion table |
|
653
|
|
|
|
|
654
|
|
|
Example |
|
655
|
|
|
------- |
|
656
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
657
|
|
|
>>> ct.specificity() |
|
658
|
|
|
0.75 |
|
659
|
|
|
|
|
660
|
|
|
|
|
661
|
|
|
.. versionadded:: 0.1.0 |
|
662
|
|
|
|
|
663
|
1 |
|
""" |
|
664
|
1 |
|
try: |
|
665
|
1 |
|
return self._tn / (self._tn + self._fp) |
|
666
|
1 |
|
except ZeroDivisionError: |
|
667
|
|
|
return float('nan') |
|
668
|
1 |
|
|
|
669
|
|
|
def fnr(self) -> float: |
|
670
|
|
|
r"""Return false negative rate. |
|
671
|
|
|
|
|
672
|
|
|
False negative rate is defined as |
|
673
|
|
|
|
|
674
|
|
|
.. math:: |
|
675
|
|
|
|
|
676
|
|
|
\frac{fn}{tp + fn} |
|
677
|
|
|
|
|
678
|
|
|
AKA miss rate |
|
679
|
|
|
|
|
680
|
|
|
Cf. https://en.wikipedia.org/wiki/False_negative_rate |
|
681
|
|
|
|
|
682
|
|
|
Returns |
|
683
|
|
|
------- |
|
684
|
|
|
float |
|
685
|
|
|
The false negative rate of the confusion table |
|
686
|
|
|
|
|
687
|
|
|
Example |
|
688
|
|
|
------- |
|
689
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
690
|
|
|
>>> round(ct.fnr(), 8) |
|
691
|
|
|
0.2 |
|
692
|
|
|
|
|
693
|
|
|
|
|
694
|
|
|
.. versionadded:: 0.4.0 |
|
695
|
|
|
|
|
696
|
1 |
|
""" |
|
697
|
|
|
return 1 - self.recall() |
|
698
|
1 |
|
|
|
699
|
|
|
def npv(self) -> float: |
|
700
|
|
|
r"""Return negative predictive value (NPV). |
|
701
|
|
|
|
|
702
|
|
|
NPV is defined as |
|
703
|
|
|
|
|
704
|
|
|
.. math:: |
|
705
|
|
|
|
|
706
|
|
|
\frac{tn}{tn + fn} |
|
707
|
|
|
|
|
708
|
|
|
AKA inverse precision |
|
709
|
|
|
|
|
710
|
|
|
Cf. https://en.wikipedia.org/wiki/Negative_predictive_value |
|
711
|
|
|
|
|
712
|
|
|
Returns |
|
713
|
|
|
------- |
|
714
|
|
|
float |
|
715
|
|
|
The negative predictive value of the confusion table |
|
716
|
|
|
|
|
717
|
|
|
Example |
|
718
|
|
|
------- |
|
719
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
720
|
|
|
>>> ct.npv() |
|
721
|
|
|
0.6666666666666666 |
|
722
|
|
|
|
|
723
|
|
|
|
|
724
|
|
|
.. versionadded:: 0.1.0 |
|
725
|
|
|
|
|
726
|
1 |
|
""" |
|
727
|
1 |
|
try: |
|
728
|
1 |
|
return self._tn / (self._tn + self._fn) |
|
729
|
1 |
|
except ZeroDivisionError: |
|
730
|
|
|
return float('nan') |
|
731
|
1 |
|
|
|
732
|
|
|
def false_omission_rate(self) -> float: |
|
733
|
|
|
r"""Return false omission rate (FOR). |
|
734
|
|
|
|
|
735
|
|
|
FOR is defined as |
|
736
|
|
|
|
|
737
|
|
|
.. math:: |
|
738
|
|
|
|
|
739
|
|
|
\frac{fn}{tn + fn} |
|
740
|
|
|
|
|
741
|
|
|
Cf. https://en.wikipedia.org/wiki/False_omission_rate |
|
742
|
|
|
|
|
743
|
|
|
Returns |
|
744
|
|
|
------- |
|
745
|
|
|
float |
|
746
|
|
|
The false omission rate of the confusion table |
|
747
|
|
|
|
|
748
|
|
|
Example |
|
749
|
|
|
------- |
|
750
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
751
|
|
|
>>> ct.false_omission_rate() |
|
752
|
|
|
0.3333333333333333 |
|
753
|
|
|
|
|
754
|
|
|
|
|
755
|
|
|
.. versionadded:: 0.4.0 |
|
756
|
|
|
|
|
757
|
1 |
|
""" |
|
758
|
1 |
|
try: |
|
759
|
1 |
|
return self._fn / (self._tn + self._fn) |
|
760
|
1 |
|
except ZeroDivisionError: |
|
761
|
|
|
return float('nan') |
|
762
|
1 |
|
|
|
763
|
|
|
def fallout(self) -> float: |
|
764
|
|
|
r"""Return fall-out. |
|
765
|
|
|
|
|
766
|
|
|
Fall-out is defined as |
|
767
|
|
|
|
|
768
|
|
|
.. math:: |
|
769
|
|
|
|
|
770
|
|
|
\frac{fp}{fp + tn} |
|
771
|
|
|
|
|
772
|
|
|
AKA false positive rate (FPR) |
|
773
|
|
|
|
|
774
|
|
|
Cf. https://en.wikipedia.org/wiki/Information_retrieval#Fall-out |
|
775
|
|
|
|
|
776
|
|
|
Returns |
|
777
|
|
|
------- |
|
778
|
|
|
float |
|
779
|
|
|
The fall-out of the confusion table |
|
780
|
|
|
|
|
781
|
|
|
Example |
|
782
|
|
|
------- |
|
783
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
784
|
|
|
>>> ct.fallout() |
|
785
|
|
|
0.25 |
|
786
|
|
|
|
|
787
|
|
|
|
|
788
|
|
|
.. versionadded:: 0.1.0 |
|
789
|
|
|
|
|
790
|
1 |
|
""" |
|
791
|
|
|
return 1 - self.specificity() |
|
792
|
1 |
|
|
|
793
|
|
|
def pos_likelihood_ratio(self) -> float: |
|
794
|
|
|
r"""Return positive likelihood ratio. |
|
795
|
|
|
|
|
796
|
|
|
Positive likelihood ratio is defined as |
|
797
|
|
|
|
|
798
|
|
|
.. math:: |
|
799
|
|
|
|
|
800
|
|
|
\frac{recall}{1-specificity} |
|
801
|
|
|
|
|
802
|
|
|
Cf. |
|
803
|
|
|
https://en.wikipedia.org/wiki/Likelihood_ratios_in_diagnostic_testing |
|
804
|
|
|
|
|
805
|
|
|
Returns |
|
806
|
|
|
------- |
|
807
|
|
|
float |
|
808
|
|
|
The positive likelihood ratio of the confusion table |
|
809
|
|
|
|
|
810
|
|
|
Example |
|
811
|
|
|
------- |
|
812
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
813
|
|
|
>>> ct.pos_likelihood_ratio() |
|
814
|
|
|
3.2 |
|
815
|
|
|
|
|
816
|
|
|
|
|
817
|
|
|
.. versionadded:: 0.4.0 |
|
818
|
|
|
|
|
819
|
1 |
|
""" |
|
820
|
|
|
return self.recall() / (1.0 - self.specificity()) |
|
821
|
1 |
|
|
|
822
|
|
|
def neg_likelihood_ratio(self) -> float: |
|
823
|
|
|
r"""Return negative likelihood ratio. |
|
824
|
|
|
|
|
825
|
|
|
Negative likelihood ratio is defined as |
|
826
|
|
|
|
|
827
|
|
|
.. math:: |
|
828
|
|
|
|
|
829
|
|
|
\frac{1-recall}{specificity} |
|
830
|
|
|
|
|
831
|
|
|
Cf. |
|
832
|
|
|
https://en.wikipedia.org/wiki/Likelihood_ratios_in_diagnostic_testing |
|
833
|
|
|
|
|
834
|
|
|
Returns |
|
835
|
|
|
------- |
|
836
|
|
|
float |
|
837
|
|
|
The negative likelihood ratio of the confusion table |
|
838
|
|
|
|
|
839
|
|
|
Example |
|
840
|
|
|
------- |
|
841
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
842
|
|
|
>>> ct.neg_likelihood_ratio() |
|
843
|
|
|
0.2666666666666666 |
|
844
|
|
|
|
|
845
|
|
|
|
|
846
|
|
|
.. versionadded:: 0.4.0 |
|
847
|
|
|
|
|
848
|
1 |
|
""" |
|
849
|
|
|
return (1.0 - self.recall()) / self.specificity() |
|
850
|
1 |
|
|
|
851
|
|
|
def diagnostic_odds_ratio(self) -> float: |
|
852
|
|
|
r"""Return diagnostic odds ratio. |
|
853
|
|
|
|
|
854
|
|
|
Diagnostic odds ratio is defined as |
|
855
|
|
|
|
|
856
|
|
|
.. math:: |
|
857
|
|
|
|
|
858
|
|
|
\frac{tp \cdot tn}{fp \cdot fn} |
|
859
|
|
|
|
|
860
|
|
|
Cf. |
|
861
|
|
|
https://en.wikipedia.org/wiki/Diagnostic_odds_ratio |
|
862
|
|
|
|
|
863
|
|
|
Returns |
|
864
|
|
|
------- |
|
865
|
|
|
float |
|
866
|
|
|
The negative likelihood ratio of the confusion table |
|
867
|
|
|
|
|
868
|
|
|
Example |
|
869
|
|
|
------- |
|
870
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
871
|
|
|
>>> ct.diagnostic_odds_ratio() |
|
872
|
|
|
12.0 |
|
873
|
|
|
|
|
874
|
|
|
|
|
875
|
|
|
.. versionadded:: 0.4.0 |
|
876
|
|
|
|
|
877
|
1 |
|
""" |
|
878
|
1 |
|
try: |
|
879
|
1 |
|
return (self._tp * self._tn) / (self._fp * self._fn) |
|
880
|
1 |
|
except ZeroDivisionError: |
|
881
|
|
|
return float('nan') |
|
882
|
1 |
|
|
|
883
|
|
|
def fdr(self) -> float: |
|
884
|
|
|
r"""Return false discovery rate (FDR). |
|
885
|
|
|
|
|
886
|
|
|
False discovery rate is defined as |
|
887
|
|
|
|
|
888
|
|
|
.. math:: |
|
889
|
|
|
|
|
890
|
|
|
\frac{fp}{fp + tp} |
|
891
|
|
|
|
|
892
|
|
|
Cf. https://en.wikipedia.org/wiki/False_discovery_rate |
|
893
|
|
|
|
|
894
|
|
|
Returns |
|
895
|
|
|
------- |
|
896
|
|
|
float |
|
897
|
|
|
The false discovery rate of the confusion table |
|
898
|
|
|
|
|
899
|
|
|
Example |
|
900
|
|
|
------- |
|
901
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
902
|
|
|
>>> ct.fdr() |
|
903
|
|
|
0.14285714285714285 |
|
904
|
|
|
|
|
905
|
|
|
|
|
906
|
|
|
.. versionadded:: 0.1.0 |
|
907
|
|
|
|
|
908
|
1 |
|
""" |
|
909
|
1 |
|
try: |
|
910
|
1 |
|
return self._fp / (self._fp + self._tp) |
|
911
|
1 |
|
except ZeroDivisionError: |
|
912
|
|
|
return float('nan') |
|
913
|
1 |
|
|
|
914
|
|
|
def accuracy(self) -> float: |
|
915
|
|
|
r"""Return accuracy. |
|
916
|
|
|
|
|
917
|
|
|
Accuracy is defined as |
|
918
|
|
|
|
|
919
|
|
|
.. math:: |
|
920
|
|
|
|
|
921
|
|
|
\frac{tp + tn}{population} |
|
922
|
|
|
|
|
923
|
|
|
Cf. https://en.wikipedia.org/wiki/Accuracy |
|
924
|
|
|
|
|
925
|
|
|
Returns |
|
926
|
|
|
------- |
|
927
|
|
|
float |
|
928
|
|
|
The accuracy of the confusion table |
|
929
|
|
|
|
|
930
|
|
|
Example |
|
931
|
|
|
------- |
|
932
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
933
|
|
|
>>> ct.accuracy() |
|
934
|
|
|
0.782608695652174 |
|
935
|
|
|
|
|
936
|
|
|
|
|
937
|
|
|
.. versionadded:: 0.1.0 |
|
938
|
|
|
|
|
939
|
1 |
|
""" |
|
940
|
1 |
|
try: |
|
941
|
1 |
|
return (self._tp + self._tn) / self.population() |
|
942
|
1 |
|
except ZeroDivisionError: |
|
943
|
|
|
return float('nan') |
|
944
|
1 |
|
|
|
945
|
|
|
def accuracy_gain(self) -> float: |
|
946
|
|
|
r"""Return gain in accuracy. |
|
947
|
|
|
|
|
948
|
|
|
The gain in accuracy is defined as |
|
949
|
|
|
|
|
950
|
|
|
.. math:: |
|
951
|
|
|
|
|
952
|
|
|
G(accuracy) = \frac{accuracy}{random~ accuracy} |
|
953
|
|
|
|
|
954
|
|
|
Cf. https://en.wikipedia.org/wiki/Gain_(information_retrieval) |
|
955
|
|
|
|
|
956
|
|
|
Returns |
|
957
|
|
|
------- |
|
958
|
|
|
float |
|
959
|
|
|
The gain in accuracy of the confusion table |
|
960
|
|
|
|
|
961
|
|
|
Example |
|
962
|
|
|
------- |
|
963
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
964
|
|
|
>>> ct.accuracy_gain() |
|
965
|
|
|
1.4325259515570934 |
|
966
|
|
|
|
|
967
|
|
|
|
|
968
|
|
|
.. versionadded:: 0.1.0 |
|
969
|
|
|
|
|
970
|
1 |
|
""" |
|
971
|
1 |
|
try: |
|
972
|
|
|
random_accuracy = ( |
|
973
|
|
|
self.cond_pos_pop() / self.population() |
|
974
|
1 |
|
) ** 2 + (self.cond_neg_pop() / self.population()) ** 2 |
|
975
|
1 |
|
return self.accuracy() / random_accuracy |
|
976
|
1 |
|
except ZeroDivisionError: |
|
977
|
|
|
return float('nan') |
|
978
|
1 |
|
|
|
979
|
|
|
def balanced_accuracy(self) -> float: |
|
980
|
|
|
r"""Return balanced accuracy. |
|
981
|
|
|
|
|
982
|
|
|
Balanced accuracy is defined as |
|
983
|
|
|
|
|
984
|
|
|
.. math:: |
|
985
|
|
|
|
|
986
|
|
|
\frac{sensitivity + specificity}{2} |
|
987
|
|
|
|
|
988
|
|
|
Cf. https://en.wikipedia.org/wiki/Accuracy |
|
989
|
|
|
|
|
990
|
|
|
Returns |
|
991
|
|
|
------- |
|
992
|
|
|
float |
|
993
|
|
|
The balanced accuracy of the confusion table |
|
994
|
|
|
|
|
995
|
|
|
Example |
|
996
|
|
|
------- |
|
997
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
998
|
|
|
>>> ct.balanced_accuracy() |
|
999
|
|
|
0.775 |
|
1000
|
|
|
|
|
1001
|
|
|
|
|
1002
|
|
|
.. versionadded:: 0.1.0 |
|
1003
|
|
|
|
|
1004
|
1 |
|
""" |
|
1005
|
|
|
return 0.5 * (self.recall() + self.specificity()) |
|
1006
|
1 |
|
|
|
1007
|
|
|
def error_rate(self) -> float: |
|
1008
|
|
|
r"""Return error rate. |
|
1009
|
|
|
|
|
1010
|
|
|
Error rate is defined as |
|
1011
|
|
|
|
|
1012
|
|
|
.. math:: |
|
1013
|
|
|
|
|
1014
|
|
|
\frac{fp + fn}{population} |
|
1015
|
|
|
|
|
1016
|
|
|
Returns |
|
1017
|
|
|
------- |
|
1018
|
|
|
float |
|
1019
|
|
|
The error rate of the confusion table |
|
1020
|
|
|
|
|
1021
|
|
|
Example |
|
1022
|
|
|
------- |
|
1023
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
1024
|
|
|
>>> ct.error_rate() |
|
1025
|
|
|
0.21739130434782608 |
|
1026
|
|
|
|
|
1027
|
|
|
|
|
1028
|
|
|
.. versionadded:: 0.4.0 |
|
1029
|
|
|
|
|
1030
|
1 |
|
""" |
|
1031
|
1 |
|
try: |
|
1032
|
|
|
return (self._fn + self._fp) / ( |
|
1033
|
|
|
self._fn + self._fp + self._tn + self._tp |
|
1034
|
1 |
|
) |
|
1035
|
1 |
|
except ZeroDivisionError: |
|
1036
|
|
|
return float('nan') |
|
1037
|
1 |
|
|
|
1038
|
|
|
def prevalence(self) -> float:
    r"""Return the prevalence.

    Prevalence is defined as

    .. math::

        \frac{condition positive}{population}

    Cf. https://en.wikipedia.org/wiki/Prevalence

    Returns
    -------
    float
        The prevalence of the confusion table

    Example
    -------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.prevalence()
    0.6521739130434783


    .. versionadded:: 0.4.0

    """
    denominator = self.population()
    numerator = self.cond_pos_pop()
    try:
        return numerator / denominator
    except ZeroDivisionError:
        # Empty population: prevalence is undefined.
        return float('nan')
|
1068
|
1 |
|
|
|
1069
|
|
|
def informedness(self) -> float:
    """Return informedness.

    Informedness is defined as

    .. math::

        sensitivity + specificity - 1

    AKA Youden's J statistic (:cite:`Youden:1950`)

    AKA DeltaP'

    Cf. https://en.wikipedia.org/wiki/Youden%27s_J_statistic

    Returns
    -------
    float
        The informedness of the confusion table

    Example
    -------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.informedness()
    0.55


    .. versionadded:: 0.1.0

    """
    sensitivity = self.recall()
    specificity = self.specificity()
    return sensitivity + specificity - 1
|
1100
|
1 |
|
|
|
1101
|
|
|
def markedness(self) -> float:
    """Return markedness.

    Markedness is defined as

    .. math::

        precision + npv - 1

    AKA DeltaP

    Returns
    -------
    float
        The markedness of the confusion table

    Example
    -------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.markedness()
    0.5238095238095237


    .. versionadded:: 0.1.0

    """
    precision = self.precision()
    npv = self.npv()
    return precision + npv - 1
|
1128
|
1 |
|
|
|
1129
|
|
|
def pr_amean(self) -> float:
    r"""Return arithmetic mean of precision & recall.

    The arithmetic mean of precision and recall is defined as

    .. math::

        \frac{precision + recall}{2}

    (The previous docstring showed a product in the numerator, which is
    not the arithmetic mean; the implementation has always computed the
    sum-based mean, matching the doctest below.)

    Cf. https://en.wikipedia.org/wiki/Arithmetic_mean

    Returns
    -------
    float
        The arithmetic mean of the confusion table's precision & recall

    Example
    -------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.pr_amean()
    0.8285714285714285


    .. versionadded:: 0.1.0

    """
    return amean((self.precision(), self.recall()))
|
1156
|
1 |
|
|
|
1157
|
|
|
def pr_gmean(self) -> float:
    r"""Return geometric mean of precision & recall.

    The geometric mean of precision and recall is defined as:

    .. math::

        \sqrt{precision \cdot recall}

    Cf. https://en.wikipedia.org/wiki/Geometric_mean

    Returns
    -------
    float
        The geometric mean of the confusion table's precision & recall

    Example
    -------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.pr_gmean()
    0.828078671210825


    .. versionadded:: 0.1.0

    """
    values = (self.precision(), self.recall())
    return gmean(values)
|
1184
|
1 |
|
|
|
1185
|
|
|
def pr_hmean(self) -> float:
    r"""Return harmonic mean of precision & recall.

    The harmonic mean of precision and recall is defined as

    .. math::

        \frac{2 \cdot precision \cdot recall}{precision + recall}

    Cf. https://en.wikipedia.org/wiki/Harmonic_mean

    Returns
    -------
    float
        The harmonic mean of the confusion table's precision & recall

    Example
    -------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.pr_hmean()
    0.8275862068965516


    .. versionadded:: 0.1.0

    """
    values = (self.precision(), self.recall())
    return hmean(values)
|
1212
|
1 |
|
|
|
1213
|
|
|
def pr_qmean(self) -> float:
    r"""Return quadratic mean of precision & recall.

    The quadratic mean of precision and recall is defined as

    .. math::

        \sqrt{\frac{precision^{2} + recall^{2}}{2}}

    Cf. https://en.wikipedia.org/wiki/Quadratic_mean

    Returns
    -------
    float
        The quadratic mean of the confusion table's precision & recall

    Example
    -------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.pr_qmean()
    0.8290638930598233


    .. versionadded:: 0.1.0

    """
    values = (self.precision(), self.recall())
    return qmean(values)
|
1240
|
1 |
|
|
|
1241
|
|
|
def pr_cmean(self) -> float:
    r"""Return contraharmonic mean of precision & recall.

    The contraharmonic mean is

    .. math::

        \frac{precision^{2} + recall^{2}}{precision + recall}

    Cf. https://en.wikipedia.org/wiki/Contraharmonic_mean

    Returns
    -------
    float
        The contraharmonic mean of the confusion table's precision & recall

    Example
    -------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.pr_cmean()
    0.8295566502463055


    .. versionadded:: 0.1.0

    """
    values = (self.precision(), self.recall())
    return cmean(values)
|
1268
|
1 |
|
|
|
1269
|
|
|
def pr_lmean(self) -> float:
    r"""Return logarithmic mean of precision & recall.

    The logarithmic mean is:
    0 if either precision or recall is 0,
    the precision if they are equal,
    otherwise

    .. math::

        \frac{precision - recall}
        {ln(precision) - ln(recall)}

    Cf. https://en.wikipedia.org/wiki/Logarithmic_mean

    Returns
    -------
    float
        The logarithmic mean of the confusion table's precision & recall

    Example
    -------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.pr_lmean()
    0.8282429171492667


    .. versionadded:: 0.1.0

    """
    prec = self.precision()
    rec = self.recall()
    # Degenerate cases: the general formula would divide by zero (equal
    # values) or take log of zero.
    if not prec or not rec:
        return 0.0
    if prec == rec:
        return prec
    return (prec - rec) / (math.log(prec) - math.log(rec))
|
1306
|
1 |
|
|
|
1307
|
|
|
def pr_imean(self) -> float:
    r"""Return identric (exponential) mean of precision & recall.

    The identric mean is:
    precision if precision = recall,
    otherwise

    .. math::

        \frac{1}{e} \cdot
        \sqrt[precision - recall]{\frac{precision^{precision}}
        {recall^{recall}}}

    Cf. https://en.wikipedia.org/wiki/Identric_mean

    Returns
    -------
    float
        The identric mean of the confusion table's precision & recall

    Example
    -------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.pr_imean()
    0.8284071826325543


    .. versionadded:: 0.1.0

    """
    values = (self.precision(), self.recall())
    return imean(values)
|
1338
|
1 |
|
|
|
1339
|
|
|
def pr_seiffert_mean(self) -> float:
    r"""Return Seiffert's mean of precision & recall.

    Seiffert's mean of precision and recall is

    .. math::

        \frac{precision - recall}{4 \cdot arctan
        \sqrt{\frac{precision}{recall}} - \pi}

    It is defined in :cite:`Seiffert:1993`.

    Returns
    -------
    float
        Seiffert's mean of the confusion table's precision & recall

    Example
    -------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.pr_seiffert_mean()
    0.8284071696048312


    .. versionadded:: 0.1.0

    """
    values = (self.precision(), self.recall())
    return seiffert_mean(values)
|
1367
|
1 |
|
|
|
1368
|
|
|
def pr_lehmer_mean(self, exp: float = 2.0) -> float:
    r"""Return Lehmer mean of precision & recall.

    The Lehmer mean is

    .. math::

        \frac{precision^{exp} + recall^{exp}}
        {precision^{exp-1} + recall^{exp-1}}

    Cf. https://en.wikipedia.org/wiki/Lehmer_mean

    Parameters
    ----------
    exp : float
        The exponent of the Lehmer mean

    Returns
    -------
    float
        The Lehmer mean for the given exponent of the confusion table's
        precision & recall

    Example
    -------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.pr_lehmer_mean()
    0.8295566502463055


    .. versionadded:: 0.1.0

    """
    values = (self.precision(), self.recall())
    return lehmer_mean(values, exp)
|
1402
|
1 |
|
|
|
1403
|
|
|
def pr_heronian_mean(self) -> float:
    r"""Return Heronian mean of precision & recall.

    The Heronian mean of precision and recall is defined as

    .. math::

        \frac{precision + \sqrt{precision \cdot recall} + recall}{3}

    Cf. https://en.wikipedia.org/wiki/Heronian_mean

    Returns
    -------
    float
        The Heronian mean of the confusion table's precision & recall

    Example
    -------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.pr_heronian_mean()
    0.8284071761178939


    .. versionadded:: 0.1.0

    """
    values = (self.precision(), self.recall())
    return heronian_mean(values)
|
1430
|
1 |
|
|
|
1431
|
|
|
def pr_hoelder_mean(self, exp: float = 2.0) -> float:
    r"""Return Hölder (power/generalized) mean of precision & recall.

    The power mean of precision and recall is defined as

    .. math::

        \left(\frac{precision^{exp} + recall^{exp}}{2}
        \right)^{\frac{1}{exp}}

    for :math:`exp \ne 0`, and the geometric mean for :math:`exp = 0`

    (The previous docstring gave
    :math:`\frac{1}{2}\sqrt[exp]{precision^{exp} + recall^{exp}}`,
    which is not the generalized mean; the doctest value below equals
    the quadratic mean for the default :math:`exp = 2`, confirming the
    corrected formula.)

    Cf. https://en.wikipedia.org/wiki/Generalized_mean

    Parameters
    ----------
    exp : float
        The exponent of the Hölder mean

    Returns
    -------
    float
        The Hölder mean for the given exponent of the confusion table's
        precision & recall

    Example
    -------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.pr_hoelder_mean()
    0.8290638930598233


    .. versionadded:: 0.1.0

    """
    return hoelder_mean((self.precision(), self.recall()), exp)
|
1467
|
1 |
|
|
|
1468
|
|
|
def pr_agmean(self) -> float:
    """Return arithmetic-geometric mean of precision & recall.

    Iterates between arithmetic & geometric means until they converge to
    a single value (rounded to 12 digits)

    Cf. https://en.wikipedia.org/wiki/Arithmetic-geometric_mean

    Returns
    -------
    float
        The arithmetic-geometric mean of the confusion table's precision &
        recall

    Example
    -------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.pr_agmean()
    0.8283250315702829


    .. versionadded:: 0.1.0

    """
    values = (self.precision(), self.recall())
    return agmean(values)
|
1493
|
1 |
|
|
|
1494
|
|
|
def pr_ghmean(self) -> float:
    """Return geometric-harmonic mean of precision & recall.

    Iterates between geometric & harmonic means until they converge to
    a single value (rounded to 12 digits)

    Cf. https://en.wikipedia.org/wiki/Geometric-harmonic_mean

    Returns
    -------
    float
        The geometric-harmonic mean of the confusion table's precision &
        recall

    Example
    -------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.pr_ghmean()
    0.8278323841238441


    .. versionadded:: 0.1.0

    """
    values = (self.precision(), self.recall())
    return ghmean(values)
|
1519
|
1 |
|
|
|
1520
|
|
|
def pr_aghmean(self) -> float:
    """Return arithmetic-geometric-harmonic mean of precision & recall.

    Iterates over arithmetic, geometric, & harmonic means until they
    converge to a single value (rounded to 12 digits), following the
    method described in :cite:`Raissouli:2009`.

    Returns
    -------
    float
        The arithmetic-geometric-harmonic mean of the confusion table's
        precision & recall

    Example
    -------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.pr_aghmean()
    0.8280786712108288


    .. versionadded:: 0.1.0

    """
    values = (self.precision(), self.recall())
    return aghmean(values)
|
1544
|
1 |
|
|
|
1545
|
|
|
def fbeta_score(self, beta: float = 1.0) -> float:
    r"""Return :math:`F_{\beta}` score.

    :math:`F_{\beta}` for a positive real value :math:`\beta` "measures
    the effectiveness of retrieval with respect to a user who
    attaches :math:`\beta` times as much importance to recall as
    precision" (van Rijsbergen 1979)

    :math:`F_{\beta}` score is defined as

    .. math::

        (1 + \beta^2) \cdot \frac{precision \cdot recall}
        {((\beta^2 \cdot precision) + recall)}

    Cf. https://en.wikipedia.org/wiki/F1_score

    Parameters
    ----------
    beta : float
        The :math:`\beta` parameter in the above formula

    Returns
    -------
    float
        The :math:`F_{\beta}` of the confusion table

    Raises
    ------
    AttributeError
        Beta must be a positive real value

    Examples
    --------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.fbeta_score()
    0.8275862068965518
    >>> ct.fbeta_score(beta=0.1)
    0.8565371024734982


    .. versionadded:: 0.1.0

    """
    if beta <= 0.0:
        raise AttributeError('Beta must be a positive real value.')
    precision = self.precision()
    recall = self.recall()
    beta_sq = beta ** 2
    return (1.0 + beta_sq) * precision * recall / (
        (beta_sq * precision) + recall
    )
|
1599
|
1 |
|
|
|
1600
|
|
|
def f2_score(self) -> float:
    """Return :math:`F_{2}`.

    The :math:`F_{2}` score emphasizes recall over precision in comparison
    to the :math:`F_{1}` score

    Cf. https://en.wikipedia.org/wiki/F1_score

    Returns
    -------
    float
        The :math:`F_{2}` of the confusion table

    Example
    -------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.f2_score()
    0.8108108108108109


    .. versionadded:: 0.1.0

    """
    return self.fbeta_score(beta=2.0)
|
1624
|
1 |
|
|
|
1625
|
|
|
def fhalf_score(self) -> float:
    """Return :math:`F_{0.5}` score.

    The :math:`F_{0.5}` score emphasizes precision over recall in
    comparison to the :math:`F_{1}` score

    Cf. https://en.wikipedia.org/wiki/F1_score

    Returns
    -------
    float
        The :math:`F_{0.5}` score of the confusion table

    Example
    -------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.fhalf_score()
    0.8450704225352114


    .. versionadded:: 0.1.0

    """
    return self.fbeta_score(beta=0.5)
|
1649
|
1 |
|
|
|
1650
|
|
|
def e_score(self, beta: float = 1.0) -> float:
    r"""Return :math:`E`-score.

    This is Van Rijsbergen's effectiveness measure:
    :math:`E=1-F_{\beta}`.

    Cf. https://en.wikipedia.org/wiki/Information_retrieval#F-measure

    Parameters
    ----------
    beta : float
        The :math:`\beta` parameter in the above formula

    Returns
    -------
    float
        The :math:`E`-score of the confusion table

    Example
    -------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.e_score()
    0.17241379310344818


    .. versionadded:: 0.1.0

    """
    fbeta = self.fbeta_score(beta)
    return 1.0 - fbeta
|
1679
|
1 |
|
|
|
1680
|
|
|
def f1_score(self) -> float:
    r"""Return :math:`F_{1}` score.

    :math:`F_{1}` score is the harmonic mean of precision and recall

    .. math::

        2 \cdot \frac{precision \cdot recall}{precision + recall}

    Cf. https://en.wikipedia.org/wiki/F1_score

    Returns
    -------
    float
        The :math:`F_{1}` of the confusion table

    Example
    -------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.f1_score()
    0.8275862068965518


    .. versionadded:: 0.1.0

    """
    return self.fbeta_score(beta=1.0)
|
1707
|
1 |
|
|
|
1708
|
|
|
def jaccard(self) -> float: |
|
1709
|
|
|
r"""Return Jaccard index. |
|
1710
|
|
|
|
|
1711
|
|
|
The Jaccard index of a confusion table is |
|
1712
|
|
|
|
|
1713
|
|
|
.. math:: |
|
1714
|
|
|
|
|
1715
|
|
|
\frac{tp}{tp+fp+fn} |
|
1716
|
|
|
|
|
1717
|
|
|
Returns |
|
1718
|
|
|
------- |
|
1719
|
|
|
float |
|
1720
|
|
|
The Jaccard index of the confusion table |
|
1721
|
|
|
|
|
1722
|
|
|
Example |
|
1723
|
|
|
------- |
|
1724
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
1725
|
|
|
>>> ct.jaccard() |
|
1726
|
|
|
0.7058823529411765 |
|
1727
|
|
|
|
|
1728
|
|
|
|
|
1729
|
|
|
.. versionadded:: 0.4.0 |
|
1730
|
|
|
|
|
1731
|
|
|
""" |
|
1732
|
|
|
try: |
|
1733
|
|
|
return self._tp / (self._tp + self._fp + self._fn) |
|
1734
|
|
|
except ZeroDivisionError: |
|
1735
|
|
|
return float('nan') |
|
1736
|
|
|
|
|
1737
|
|
|
def d_measure(self) -> float:
    r"""Return D-measure.

    :math:`D`-measure is defined as

    .. math::

        1-\frac{1}{\frac{1}{precision}+\frac{1}{recall}-1}

    Returns
    -------
    float
        The :math:`D`-measure of the confusion table (``nan`` when
        precision or recall is zero, consistent with the other metrics)

    Examples
    --------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.d_measure()
    0.2941176470588237


    .. versionadded:: 0.4.0

    """
    # Guard against zero precision/recall (or a zero combined
    # denominator): every sibling metric in this class returns nan in
    # degenerate cases rather than raising ZeroDivisionError.
    try:
        return 1.0 - (
            1.0 / (1.0 / self.precision() + 1.0 / self.recall() - 1.0)
        )
    except ZeroDivisionError:
        return float('nan')
|
1764
|
|
|
|
|
1765
|
1 |
|
def mcc(self) -> float: |
|
1766
|
1 |
|
r"""Return Matthews correlation coefficient (MCC). |
|
1767
|
1 |
|
|
|
1768
|
1 |
|
The Matthews correlation coefficient is defined in |
|
1769
|
|
|
:cite:`Matthews:1975` as: |
|
1770
|
1 |
|
|
|
1771
|
|
|
.. math:: |
|
1772
|
|
|
|
|
1773
|
|
|
\frac{(tp \cdot tn) - (fp \cdot fn)} |
|
1774
|
|
|
{\sqrt{(tp + fp)(tp + fn)(tn + fp)(tn + fn)}} |
|
1775
|
|
|
|
|
1776
|
|
|
This is equivalent to the geometric mean of informedness and |
|
1777
|
|
|
markedness, defined above. |
|
1778
|
|
|
|
|
1779
|
|
|
Cf. https://en.wikipedia.org/wiki/Matthews_correlation_coefficient |
|
1780
|
|
|
|
|
1781
|
|
|
Returns |
|
1782
|
|
|
------- |
|
1783
|
|
|
float |
|
1784
|
|
|
The Matthews correlation coefficient of the confusion table |
|
1785
|
|
|
|
|
1786
|
|
|
Example |
|
1787
|
|
|
------- |
|
1788
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
1789
|
|
|
>>> ct.mcc() |
|
1790
|
|
|
0.5367450401216932 |
|
1791
|
|
|
|
|
1792
|
|
|
|
|
1793
|
|
|
.. versionadded:: 0.1.0 |
|
1794
|
|
|
|
|
1795
|
|
|
""" |
|
1796
|
|
|
try: |
|
1797
|
|
|
return ((self._tp * self._tn) - (self._fp * self._fn)) / math.sqrt( |
|
1798
|
|
|
(self._tp + self._fp) |
|
1799
|
|
|
* (self._tp + self._fn) |
|
1800
|
|
|
* (self._tn + self._fp) |
|
1801
|
|
|
* (self._tn + self._fn) |
|
1802
|
|
|
) |
|
1803
|
|
|
except ZeroDivisionError: |
|
1804
|
|
|
return float('nan') |
|
1805
|
|
|
|
|
1806
|
|
|
def significance(self) -> float: |
|
1807
|
1 |
|
r"""Return the significance, :math:`\chi^{2}`. |
|
1808
|
|
|
|
|
1809
|
1 |
|
Significance is defined as |
|
1810
|
|
|
|
|
1811
|
|
|
.. math:: |
|
1812
|
|
|
|
|
1813
|
|
|
\chi^{2} = |
|
1814
|
|
|
\frac{(tp \cdot tn - fp \cdot fn)^{2} (tp + tn + fp + fn)} |
|
1815
|
|
|
{((tp + fp)(tp + fn)(tn + fp)(tn + fn)}` |
|
1816
|
|
|
|
|
1817
|
|
|
Also: :math:`\chi^{2} = MCC^{2} \cdot n` |
|
1818
|
|
|
|
|
1819
|
|
|
Cf. https://en.wikipedia.org/wiki/Pearson%27s_chi-square_test |
|
1820
|
|
|
|
|
1821
|
|
|
Returns |
|
1822
|
|
|
------- |
|
1823
|
|
|
float |
|
1824
|
|
|
The significance of the confusion table |
|
1825
|
|
|
|
|
1826
|
|
|
Example |
|
1827
|
|
|
------- |
|
1828
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
1829
|
|
|
>>> ct.significance() |
|
1830
|
|
|
66.26190476190476 |
|
1831
|
|
|
|
|
1832
|
|
|
|
|
1833
|
1 |
|
.. versionadded:: 0.1.0 |
|
1834
|
|
|
|
|
1835
|
|
|
""" |
|
1836
|
|
|
try: |
|
1837
|
1 |
|
return ( |
|
1838
|
|
|
(self._tp * self._tn - self._fp * self._fn) ** 2 |
|
1839
|
|
|
* (self._tp + self._tn + self._fp + self._fn) |
|
1840
|
|
|
) / ( |
|
1841
|
|
|
(self._tp + self._fp) |
|
1842
|
|
|
* (self._tp + self._fn) |
|
1843
|
|
|
* (self._tn + self._fp) |
|
1844
|
|
|
* (self._tn + self._fn) |
|
1845
|
|
|
) |
|
1846
|
|
|
except ZeroDivisionError: |
|
1847
|
|
|
return float('nan') |
|
1848
|
|
|
|
|
1849
|
|
|
def kappa_statistic(self) -> float:
    r"""Return κ statistic.

    The κ statistic is defined as

    .. math::

        \kappa = \frac{accuracy - random~ accuracy}
        {1 - random~ accuracy}

    The κ statistic compares the performance of the classifier relative to
    the performance of a random classifier. :math:`\kappa` = 0 indicates
    performance identical to random. :math:`\kappa` = 1 indicates perfect
    predictive success. :math:`\kappa` = -1 indicates perfect predictive
    failure.

    Returns
    -------
    float
        The κ statistic of the confusion table (``nan`` for an empty
        table)

    Example
    -------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.kappa_statistic()
    0.5344129554655871


    .. versionadded:: 0.1.0

    """
    try:
        # Expected agreement of a random classifier with the same
        # marginal distributions.
        random_accuracy = (
            (self._tn + self._fp) * (self._tn + self._fn)
            + (self._fn + self._tp) * (self._fp + self._tp)
        ) / self.population() ** 2
        return (self.accuracy() - random_accuracy) / (1 - random_accuracy)
    except ZeroDivisionError:
        return float('nan')
|
1888
|
|
|
|
|
1889
|
|
|
def phi_coefficient(self) -> float: |
|
1890
|
|
|
r"""Return φ coefficient. |
|
1891
|
|
|
|
|
1892
|
|
|
The :math:`\phi` coefficient is defined as |
|
1893
|
|
|
|
|
1894
|
|
|
.. math:: |
|
1895
|
|
|
|
|
1896
|
|
|
\phi = \frac{tp \cdot tn - fp \cdot tn} |
|
1897
|
|
|
{\sqrt{(tp + fp) \cdot (tp + fn) \cdot (tn + fp) \cdot |
|
1898
|
|
|
(tn + fn)}} |
|
1899
|
|
|
|
|
1900
|
|
|
Returns |
|
1901
|
|
|
------- |
|
1902
|
|
|
float |
|
1903
|
|
|
The φ coefficient of the confusion table |
|
1904
|
|
|
|
|
1905
|
|
|
Example |
|
1906
|
|
|
------- |
|
1907
|
|
|
>>> ct = ConfusionTable(120, 60, 20, 30) |
|
1908
|
1 |
|
>>> ct.phi_coefficient() |
|
1909
|
1 |
|
0.5367450401216932 |
|
1910
|
|
|
|
|
1911
|
|
|
|
|
1912
|
|
|
.. versionadded:: 0.4.0 |
|
1913
|
|
|
|
|
1914
|
|
|
""" |
|
1915
|
|
|
try: |
|
1916
|
|
|
return ((self._tp * self._tn) - (self._fp * self._fn)) / ( |
|
1917
|
|
|
(self._tp + self._fn) |
|
1918
|
1 |
|
* (self._tp + self._fp) |
|
1919
|
1 |
|
* (self._tn + self._fn) |
|
1920
|
|
|
* (self._tn + self._fp) |
|
1921
|
1 |
|
) ** 0.5 |
|
1922
|
|
|
except ZeroDivisionError: |
|
1923
|
|
|
return float('nan') |
|
1924
|
|
|
|
|
1925
|
|
|
def joint_entropy(self) -> float:
    """Return the joint entropy.

    Implementation based on https://github.com/Magnetic/proficiency-metric

    Returns
    -------
    float
        The joint entropy of the confusion table (``nan`` when any cell
        or the population is zero, via the log domain error)

    Example
    -------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.joint_entropy()
    1.1680347446270396


    .. versionadded:: 0.4.0

    """
    try:
        # H = log(N) - (1/N) * sum(n_i * log(n_i)) over the four cells.
        weighted = sum(
            cell * math.log(cell) for cell in self.to_tuple()
        )
        return math.log(self.population()) - weighted / self.population()
    except ValueError:
        # log() of a non-positive count: entropy is undefined.
        return float('nan')
|
1953
|
1 |
|
|
|
1954
|
|
|
def actual_entropy(self) -> float:
    """Return the actual entropy.

    Implementation based on https://github.com/Magnetic/proficiency-metric

    Returns
    -------
    float
        The actual entropy of the confusion table (``nan`` when a
        marginal or the population is zero, via the log domain error)

    Example
    -------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.actual_entropy()
    0.6460905050608101


    .. versionadded:: 0.4.0

    """
    try:
        # Entropy of the actual (condition) marginal distribution.
        weighted = sum(
            margin * math.log(margin)
            for margin in (self.cond_pos_pop(), self.cond_neg_pop())
        )
        return math.log(self.population()) - weighted / self.population()
    except ValueError:
        return float('nan')
|
1985
|
|
|
|
|
1986
|
|
|
def predicted_entropy(self) -> float:
    """Return the predicted entropy.

    Implementation based on https://github.com/Magnetic/proficiency-metric

    Returns
    -------
    float
        The predicted entropy of the confusion table (``nan`` when a
        marginal or the population is zero, via the log domain error)

    Example
    -------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.predicted_entropy()
    0.6693279632926457


    .. versionadded:: 0.4.0

    """
    try:
        # Entropy of the predicted marginal distribution.
        weighted = sum(
            margin * math.log(margin)
            for margin in (self.pred_pos_pop(), self.pred_neg_pop())
        )
        return math.log(self.population()) - weighted / self.population()
    except ValueError:
        return float('nan')
|
2017
|
1 |
|
|
|
2018
|
1 |
|
def mutual_information(self) -> float:
    """Return the mutual information.

    Implementation based on https://github.com/Magnetic/proficiency-metric

    Returns
    -------
    float
        The mutual information of the confusion table

    Cf. https://en.wikipedia.org/wiki/Mutual_information

    Example
    -------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.mutual_information()
    0.14738372372641576


    .. versionadded:: 0.4.0

    """
    # Each cell contributes count * log(N * count / (row_marginal * col_marginal)).
    # Cells with a zero count are skipped: 0 * log(0) is taken as 0 by the
    # standard information-theoretic convention. (Previously math.log(0) was
    # evaluated before the multiply, raising an uncaught ValueError.)
    try:
        cells = (
            (self._fp, self.cond_neg_pop() * self.pred_pos_pop()),
            (self._fn, self.cond_pos_pop() * self.pred_neg_pop()),
            (self._tn, self.cond_neg_pop() * self.pred_neg_pop()),
            (self._tp, self.cond_pos_pop() * self.pred_pos_pop()),
        )
        return (
            sum(
                count * math.log(self.population() * count / marginal)
                for count, marginal in cells
                if count
            )
            / self.population()
        )
    except (ZeroDivisionError, ValueError):
        return float('nan')
def proficiency(self) -> float:
    """Return the proficiency.

    Implementation based on https://github.com/Magnetic/proficiency-metric
    :cite:`Steingold:2015`

    AKA uncertainty coefficient

    Cf. https://en.wikipedia.org/wiki/Uncertainty_coefficient

    Returns
    -------
    float
        The proficiency of the confusion table

    Example
    -------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.proficiency()
    0.228116219897929


    .. versionadded:: 0.4.0

    """
    # Proficiency normalizes mutual information by the actual entropy.
    mutual_info = self.mutual_information()
    return mutual_info / self.actual_entropy()
def igr(self) -> float:
    """Return information gain ratio.

    Implementation based on https://github.com/Magnetic/proficiency-metric

    Cf. https://en.wikipedia.org/wiki/Information_gain_ratio

    Returns
    -------
    float
        The information gain ratio of the confusion table

    Example
    -------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.igr()
    0.22019657299448012


    .. versionadded:: 0.4.0

    """
    # Information gain ratio normalizes mutual information by the
    # predicted (split) entropy.
    mutual_info = self.mutual_information()
    return mutual_info / self.predicted_entropy()
def dependency(self) -> float:
    """Return dependency.

    Implementation based on https://github.com/Magnetic/proficiency-metric

    Returns
    -------
    float
        The dependency of the confusion table

    Example
    -------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.dependency()
    0.12618094145262454


    .. versionadded:: 0.4.0

    """
    # Dependency normalizes mutual information by the joint entropy.
    mutual_info = self.mutual_information()
    return mutual_info / self.joint_entropy()
def lift(self) -> float:
    """Return lift.

    Implementation based on https://github.com/Magnetic/proficiency-metric

    Returns
    -------
    float
        The lift of the confusion table

    Example
    -------
    >>> ct = ConfusionTable(120, 60, 20, 30)
    >>> ct.lift()
    1.3142857142857143


    .. versionadded:: 0.4.0

    """
    # lift = P(tp) / (P(pred pos) * P(cond pos)), rearranged so only one
    # division is needed: tp * N / (pred_pos * cond_pos).
    denominator = self.pred_pos_pop() * self.cond_pos_pop()
    try:
        return self.population() * self._tp / denominator
    except ZeroDivisionError:
        # An empty predicted-positive or condition-positive population
        # makes lift undefined.
        return float('nan')
if __name__ == '__main__':
    # When run as a script, execute the doctests embedded in this module.
    import doctest

    doctest.testmod()