- from application.data_access.datasette_utils import get_datasette_query_issue_summary
+ import pandas as pd

+ from application.data_access.datasette_utils import get_datasette_query

- def get_issue_summary():
-     issues_df = get_datasette_query_issue_summary(
-         "performance/endpoint_dataset_issue_type_summary"
-     )
+
+ def get_full_issue_summary():
+     pagination_incomplete = True
+     offset = 0
+     issue_summary_df_list = []
+     while pagination_incomplete:
+         issue_summary_df = get_issue_summary(offset)
+         issue_summary_df_list.append(issue_summary_df)
+         pagination_incomplete = len(issue_summary_df) == 1000
+         offset += 1000
+     issues_df = pd.concat(issue_summary_df_list)

    # Convert DataFrame to a list of dictionaries (rows)
    rows = issues_df.to_dict(orient="records")
@@ -14,46 +22,56 @@ def get_issue_summary():
        {
            "display_severity": "No issues",
            "severity": "",
-             "total_count_percentage": 0.0,
+             "total_count": 0,
+             "total_count_percentage": 0.00,
            "internal_count": 0,
+             "internal_count_percentage": 0.00,
            "external_count": 0,
-             "total_count": 0,
+             "external_count_percentage": 0.00,
            "classes": "reporting-good-background",
        },
        {
            "display_severity": "Info",
            "severity": "info",
-             "total_count_percentage": 0.0,
+             "total_count": 0,
+             "total_count_percentage": 0.00,
            "internal_count": 0,
+             "internal_count_percentage": 0.00,
            "external_count": 0,
-             "total_count": 0,
+             "external_count_percentage": 0.00,
            "classes": "reporting-good-background",
        },
        {
            "display_severity": "Warning",
            "severity": "warning",
-             "total_count_percentage": 0.0,
+             "total_count": 0,
+             "total_count_percentage": 0.00,
            "internal_count": 0,
+             "internal_count_percentage": 0.00,
            "external_count": 0,
-             "total_count": 0,
+             "external_count_percentage": 0.00,
            "classes": "reporting-medium-background",
        },
        {
            "display_severity": "Error",
            "severity": "error",
-             "total_count_percentage": 0.0,
+             "total_count": 0,
+             "total_count_percentage": 0.00,
            "internal_count": 0,
+             "internal_count_percentage": 0.00,
            "external_count": 0,
-             "total_count": 0,
+             "external_count_percentage": 0.00,
            "classes": "reporting-bad-background",
        },
        {
            "display_severity": "Notice",
            "severity": "notice",
-             "total_count_percentage": 0.0,
+             "total_count": 0,
+             "total_count_percentage": 0.00,
            "internal_count": 0,
+             "internal_count_percentage": 0.00,
            "external_count": 0,
-             "total_count": 0,
+             "external_count_percentage": 0.00,
            "classes": "reporting-bad-background",
        },
    ]
@@ -93,53 +111,64 @@ def get_issue_summary():
    # Add issue_severity row
    stats_rows = []
    for issue_severity in issue_severity_counts:
+         if issue_severity["internal_count"] > 0:
+             issue_severity["internal_count_percentage"] = round(
+                 (issue_severity["internal_count"] / total_issues) * 100, 2
+             )
+
+         if issue_severity["external_count"] > 0:
+             issue_severity["external_count_percentage"] = round(
+                 (issue_severity["external_count"] / total_issues) * 100, 2
+             )
+
        if issue_severity["total_count"] > 0:
-             issue_severity[
-                 "total_count_percentage"
-             ] = f"{int((issue_severity['total_count'] / total_issues) * 100)}%"
+             issue_severity["total_count_percentage"] = round(
+                 (issue_severity["total_count"] / total_issues) * 100, 2
+             )
+
        stats_rows.append(
            [
                {
                    "text": issue_severity["display_severity"],
                    "classes": issue_severity["classes"] + " reporting-table-cell",
                },
                {
-                     "text": issue_severity["total_count"],
-                     "classes": "reporting-table-cell",
-                 },
-                 {
-                     "text": issue_severity["total_count_percentage"],
+                     "text": f"{issue_severity['internal_count']} ({issue_severity['internal_count_percentage']}%)",
                    "classes": "reporting-table-cell",
                },
                {
-                     "text": issue_severity["internal_count"],
+                     "text": f"{issue_severity['external_count']} ({issue_severity['external_count_percentage']}%)",
                    "classes": "reporting-table-cell",
                },
                {
-                     "text": issue_severity["external_count"],
+                     "text": f"{issue_severity['total_count']} ({issue_severity['total_count_percentage']}%)",
                    "classes": "reporting-table-cell",
                },
            ]
        )

-     # Add totals row
+     # Add totals row at the bottom
    stats_rows.append(
        [
            {"text": "Total", "classes": "reporting-table-cell"},
+             {
+                 "text": f"{total_internal} ({round((total_internal / total_issues) * 100, 2)}%)",
+                 "classes": "reporting-table-cell",
+             },
+             {
+                 "text": f"{total_external} ({round((total_external / total_issues) * 100, 2)}%)",
+                 "classes": "reporting-table-cell",
+             },
            {"text": total_issues, "classes": "reporting-table-cell"},
-             {"text": "", "classes": "reporting-table-cell"},
-             {"text": total_internal, "classes": "reporting-table-cell"},
-             {"text": total_external, "classes": "reporting-table-cell"},
        ]
    )

    # Define headers
    stats_headers = [
-         {"text": "Issue Severity"},
-         {"text": "Count"},
-         {"text": "% Count"},
-         {"text": "Internal"},
-         {"text": "External"},
+         {"text": "Severity"},
+         {"text": "Internal (%)"},
+         {"text": "External (%)"},
+         {"text": "Total (%)"},
    ]

    return {
@@ -153,16 +182,23 @@ def get_issue_summary():
    }


- def get_issue_summary_for_csv():
-     issue_summary_df = get_datasette_query_issue_summary(
-         "performance/endpoint_dataset_issue_type_summary"
-     )
+ def get_full_issue_summary_for_csv():
+     pagination_incomplete = True
+     offset = 0
+     issue_summary_df_list = []
+     while pagination_incomplete:
+         issue_summary_df = get_issue_summary_for_csv(offset)
+         issue_summary_df_list.append(issue_summary_df)
+         pagination_incomplete = len(issue_summary_df) == 1000
+         offset += 1000
+     issue_summary_df = pd.concat(issue_summary_df_list)
+
    issue_summary_df = issue_summary_df[issue_summary_df["count_issues"].notna()]

    return issue_summary_df[
        [
            "organisation",
-             "name",
+             "organisation_name",
            "pipeline",
            "issue_type",
            "severity",
@@ -181,3 +217,17 @@ def get_issue_summary_for_csv():
            "resource_end_date",
        ]
    ]
+
+
+ def get_issue_summary(offset):
+     sql = f"""
+     select count_issues, severity, responsibility from endpoint_dataset_issue_type_summary limit 1000 offset {offset}
+     """
+     return get_datasette_query("performance", sql)
+
+
+ def get_issue_summary_for_csv(offset):
+     sql = f"""
+     select * from endpoint_dataset_issue_type_summary limit 1000 offset {offset}
+     """
+     return get_datasette_query("performance", sql)
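Note on the paging pattern introduced above: datasette returns at most 1,000 rows per query, so the new get_full_* helpers request 1,000-row pages via limit/offset until a short page comes back, then stitch the pages together with pandas. Below is a minimal, self-contained sketch of that loop; fetch_page, fetch_all, PAGE_SIZE and TOTAL_ROWS are illustrative stand-ins rather than names from this codebase, and fetch_page only simulates what get_datasette_query would return.

import pandas as pd

PAGE_SIZE = 1000
TOTAL_ROWS = 2500  # pretend the underlying table holds 2,500 rows


def fetch_page(offset, page_size=PAGE_SIZE):
    # Stand-in for get_datasette_query: returns at most page_size rows
    # starting at offset, mirroring "... limit 1000 offset {offset}".
    end = min(offset + page_size, TOTAL_ROWS)
    return pd.DataFrame({"rowid": range(offset, end)})


def fetch_all():
    pages = []
    offset = 0
    pagination_incomplete = True
    while pagination_incomplete:
        page = fetch_page(offset)
        pages.append(page)
        # A full page may mean more rows remain; a short page ends the loop.
        pagination_incomplete = len(page) == PAGE_SIZE
        offset += PAGE_SIZE
    return pd.concat(pages, ignore_index=True)


print(len(fetch_all()))  # 2500

One edge case worth checking against the real get_datasette_query: when the row count is an exact multiple of 1,000, the loop issues one extra query that returns an empty page. In this sketch pd.concat handles that harmlessly, but it assumes the query helper returns an empty DataFrame rather than None.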