Skip to content

Commit 34102b1

Browse files
Merge pull request #44 from digital-land/endpoint_detail_page_fix
Endpoint detail dataset fix
2 parents e2b4e93 + 1b691cb commit 34102b1

File tree

3 files changed

+101
-100
lines changed

3 files changed

+101
-100
lines changed

application/blueprints/report/views.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -168,9 +168,9 @@ def download_csv():
168168
return send_file(file_path, download_name="overview_issue_summary.csv")
169169

170170

171-
@report_bp.get("endpoint/<endpoint_hash>/<pipeline>")
def endpoint_details(endpoint_hash, pipeline):
    """Render the detail page for a single endpoint/dataset pair.

    Both path segments come straight from the URL; the pipeline segment
    disambiguates endpoints that feed more than one dataset.
    """
    # Renamed local to avoid shadowing the view function's own name.
    details = get_endpoint_details(endpoint_hash, pipeline)
    return render_template(
        "reporting/endpoint_details.html", endpoint_details=details
    )
Original file line numberDiff line numberDiff line change
@@ -1,96 +1,97 @@
1-
from application.data_access.datasette_utils import get_datasette_query


def get_endpoint_details(endpoint_hash):
    """Assemble log, resource and endpoint metadata for one endpoint hash.

    Returns a dict with headers/rows (styled for the reporting tables) for
    the logs and resources tables, plus a single endpoint-info record.
    """
    logs_df = get_logs(endpoint_hash)
    logs_headers = [
        {"text": column, "classes": "reporting-table-header"}
        for column in logs_df.columns.values.tolist()
    ]
    logs_rows = [
        [{"text": cell, "classes": "reporting-table-cell"} for cell in row]
        for row in logs_df.values.tolist()
    ]

    resources_df = get_resources(endpoint_hash)
    resources_headers = [
        {"text": column, "classes": "reporting-table-header"}
        for column in resources_df.columns.values.tolist()
    ]
    resources_rows = [
        [{"text": cell, "classes": "reporting-table-cell"} for cell in row]
        for row in resources_df.values.tolist()
    ]

    endpoint_info_df = get_endpoint_info(endpoint_hash)
    endpoint_info = endpoint_info_df.to_dict(orient="records")
    return {
        "logs_headers": logs_headers,
        "logs_rows": logs_rows,
        "resources_headers": resources_headers,
        "resources_rows": resources_rows,
        "endpoint_info": endpoint_info[0],
    }


def get_logs(endpoint_hash):
    """Return the fetch-log history for an endpoint, newest entries first."""
    sql = f"""
        select
            substring(entry_date,1,10) as entry_date,
            case
                when (status = '') then exception
                else status
            end as status,
            resource
        from
            log
        where
            endpoint = '{endpoint_hash}'
        order by
            entry_date desc
    """
    return get_datasette_query("digital-land", sql)


def get_resources(endpoint_hash):
    """Return the resources collected from an endpoint, newest first."""
    sql = f"""
        select
            r.resource,
            r.start_date,
            r.end_date
        from
            resource r
            inner join resource_endpoint re on re.resource = r.resource
        where
            re.endpoint = '{endpoint_hash}'
        order by
            r.start_date desc
    """
    return get_datasette_query("digital-land", sql)


def get_endpoint_info(endpoint_hash):
    """Return endpoint/source/organisation metadata for an endpoint.

    NOTE(review): the '-eng' strip on organisation presumably normalises
    legacy organisation codes — confirm against the organisation table.
    """
    sql = f"""
        select
            sp.pipeline,
            o.name as organisation_name,
            s.organisation,
            e.endpoint,
            e.endpoint_url,
            e.start_date,
            substring(e.entry_date,1,10) as entry_date
        from
            endpoint e
            inner join source s on s.endpoint = e.endpoint
            inner join source_pipeline sp on sp.source = s.source
            inner join organisation o on o.organisation = replace(s.organisation, '-eng', '')
        where
            s.endpoint = '{endpoint_hash}'
    """
    return get_datasette_query("digital-land", sql)
1+
from application.data_access.datasette_utils import get_datasette_query
2+
3+
4+
def _format_table(df):
    # Convert a dataframe into the header/row dicts the reporting tables
    # expect; shared by the logs and resources tables below.
    headers = [
        {"text": column, "classes": "reporting-table-header"}
        for column in df.columns.values.tolist()
    ]
    rows = [
        [{"text": cell, "classes": "reporting-table-cell"} for cell in row]
        for row in df.values.tolist()
    ]
    return headers, rows


def get_endpoint_details(endpoint_hash, pipeline):
    """Assemble everything the endpoint-detail page needs.

    Args:
        endpoint_hash: endpoint identifier taken from the URL.
        pipeline: dataset/pipeline name taken from the URL; disambiguates
            endpoints that feed more than one dataset.

    Returns:
        dict with headers/rows for the logs and resources tables plus a
        single endpoint-info record ({} when the endpoint/pipeline pair
        matches no rows).
    """
    logs_headers, logs_rows = _format_table(get_logs(endpoint_hash))
    resources_headers, resources_rows = _format_table(get_resources(endpoint_hash))

    endpoint_info = get_endpoint_info(endpoint_hash, pipeline).to_dict(
        orient="records"
    )
    return {
        "logs_headers": logs_headers,
        "logs_rows": logs_rows,
        "resources_headers": resources_headers,
        "resources_rows": resources_rows,
        # Guard: an unknown endpoint/pipeline pair returns no rows, and the
        # bare endpoint_info[0] previously raised IndexError here.
        "endpoint_info": endpoint_info[0] if endpoint_info else {},
    }
40+
41+
42+
def get_logs(endpoint_hash):
    """Return the fetch-log history for an endpoint, newest entries first.

    `endpoint_hash` arrives from the URL, so escape single quotes before
    interpolating it into the SQL string literal — otherwise a stray quote
    breaks out of the quoted value and corrupts (or injects into) the query.
    """
    safe_endpoint = endpoint_hash.replace("'", "''")  # standard SQL '' escape
    sql = f"""
        select
            substring(entry_date,1,10) as entry_date,
            case
                when (status = '') then exception
                else status
            end as status,
            resource
        from
            log
        where
            endpoint = '{safe_endpoint}'
        order by
            entry_date desc
    """
    return get_datasette_query("digital-land", sql)
59+
60+
61+
def get_resources(endpoint_hash):
    """Return the resources collected from an endpoint, newest first.

    `endpoint_hash` arrives from the URL, so escape single quotes before
    interpolating it into the SQL string literal to prevent breaking out of
    the quoted value.
    """
    safe_endpoint = endpoint_hash.replace("'", "''")  # standard SQL '' escape
    sql = f"""
        select
            r.resource,
            r.start_date,
            r.end_date
        from
            resource r
            inner join resource_endpoint re on re.resource = r.resource
        where
            re.endpoint = '{safe_endpoint}'
        order by
            r.start_date desc
    """
    return get_datasette_query("digital-land", sql)
76+
77+
78+
def get_endpoint_info(endpoint_hash, dataset):
    """Return endpoint/source/organisation metadata for one endpoint/dataset.

    Both arguments arrive from the URL, so escape single quotes before
    interpolating them into SQL string literals to prevent breaking out of
    the quoted values.

    NOTE(review): the '-eng' strip on organisation presumably normalises
    legacy organisation codes — confirm against the organisation table.
    """
    safe_endpoint = endpoint_hash.replace("'", "''")  # standard SQL '' escape
    safe_dataset = dataset.replace("'", "''")
    sql = f"""
        select
            sp.pipeline,
            o.name as organisation_name,
            s.organisation,
            e.endpoint,
            e.endpoint_url,
            e.start_date,
            substring(e.entry_date,1,10) as entry_date
        from
            endpoint e
            inner join source s on s.endpoint = e.endpoint
            inner join source_pipeline sp on sp.source = s.source
            inner join organisation o on o.organisation = replace(s.organisation, '-eng', '')
        where
            s.endpoint = '{safe_endpoint}'
            AND sp.pipeline = '{safe_dataset}'
    """
    return get_datasette_query("digital-land", sql)

application/data_access/odp_summaries/status.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -171,7 +171,7 @@ def create_status_row(organisation, cohort, name, status_df, datasets):
171171
endpoint_hash = df_row["endpoint"]
172172
if len(endpoint_hash) > 0:
173173
html = (
174-
f'<a classes = "govuk-link--no-visited-state" href="../endpoint/{endpoint_hash.values[0]} ">'
174+
f'<a classes = "govuk-link--no-visited-state" href="../endpoint/{endpoint_hash.values[0]}/{dataset}">'
175175
+ text
176176
+ "</a>"
177177
)

0 commit comments

Comments
 (0)