-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathevaluation_measures.py
More file actions
47 lines (36 loc) · 1.62 KB
/
evaluation_measures.py
File metadata and controls
47 lines (36 loc) · 1.62 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
from download_data import get_dicc
class InformationRetrievalEvaluator:
    """Standard IR evaluation measures over sets of document ids.

    NOTE(review): the method named ``accuracy`` actually computes
    *precision* (relevant retrieved / total retrieved). The name is kept
    for backward compatibility with existing callers and result keys.
    """

    @staticmethod
    def evaluate(expected_result: list[int], query_result: list[int]) -> dict[str, float]:
        """Return {'accuracy', 'recall', 'f1'} for the two id lists.

        Duplicates are ignored: both lists are converted to sets.
        """
        return InformationRetrievalEvaluator.f1(set(expected_result), set(query_result), True)

    @staticmethod
    def accuracy(expected_results: set[int], query_result: set[int]) -> float:
        """Fraction of retrieved ids that are relevant (i.e. precision).

        Returns 0.0 when nothing was retrieved.
        """
        # TP ∪ FP is exactly the retrieved set, so the denominator
        # simplifies to len(query_result) — same value as the original
        # true_positive.union(false_positive) construction.
        if not query_result:
            return 0.0
        return len(expected_results & query_result) / len(query_result)

    @staticmethod
    def recall(expected_results: set[int], query_result: set[int]) -> float:
        """Fraction of relevant ids that were retrieved.

        Returns 0.0 when nothing was expected.
        """
        # TP ∪ FN is exactly the expected set, so the denominator
        # simplifies to len(expected_results).
        if not expected_results:
            return 0.0
        return len(expected_results & query_result) / len(expected_results)

    @staticmethod
    def f1(expected_results: set[int], query_result: set[int],
           return_all_measures: bool = False):
        """Harmonic mean of precision ('accuracy') and recall.

        When return_all_measures is True, return a dict with keys
        'accuracy', 'recall' and 'f1'; otherwise return only the F1
        score as a float. The default of False is backward compatible
        (the parameter was previously required positional).

        Returns 0.0 for F1 when both measures are zero (avoids 0/0).
        """
        accuracy = InformationRetrievalEvaluator.accuracy(expected_results, query_result)
        recall = InformationRetrievalEvaluator.recall(expected_results, query_result)
        if accuracy + recall == 0:
            f1 = 0.0
        else:
            f1 = 2 * accuracy * recall / (accuracy + recall)
        if return_all_measures:
            return {
                'accuracy': accuracy,
                'recall': recall,
                'f1': f1
            }
        return f1