Merge pull request #12 from OCHA-DAP/dev
dev into prod for new version
danmihaila authored Dec 10, 2024
2 parents 51730f4 + 504336b commit 4bcd0a0
Showing 4 changed files with 53 additions and 21 deletions.
dev-requirements.txt (2 changes: 1 addition & 1 deletion)
@@ -1,3 +1,3 @@
-pytest~=8.3.3
+pytest~=8.3.4
 pytest-cov~=6.0.0
 mock~=5.1.0
processing/helpers.py (3 changes: 2 additions & 1 deletion)
@@ -1,5 +1,6 @@
 import logging
 import time
+from typing import Dict

 logger = logging.getLogger(__name__)

@@ -22,7 +23,7 @@
     EVENT_TYPE_SPREADSHEET_SHEET_CHANGED,
 }

-def get_change_summary(event):
+def get_change_summary(event: Dict)->str:
     if event :
         ev_type = event.get('event_type')
         resource_name = event.get('resource_name','A resource')
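For context on the new signature: get_change_summary takes the webhook event dict and builds a summary string from fields such as event_type and resource_name (the rest of its body is truncated in this view). A minimal call sketch; the event values below are illustrative and the exact wording of the returned summary is not shown in this diff:

    # Hypothetical usage; keys mirror the code and test fixture in this diff, values are assumptions.
    from processing.helpers import get_change_summary

    event = {
        'event_type': 'resource-created',          # assumed value; the real constants live in helpers.py
        'resource_name': 'test.csv',
        'dataset_id': 'f679245a-5740-4ba6-a395-ec4e1ac20325',
    }
    summary: str = get_change_summary(event)       # human-readable summary used in the notification payload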
processing/main.py (40 changes: 29 additions & 11 deletions)
@@ -1,6 +1,6 @@
 import logging
 import datetime
-from typing import Dict, Set
+from typing import Dict, Set, List

 from processing.helpers import get_change_summary
 from processing.novu import push_notification_to_novu

@@ -10,26 +10,44 @@

 config = get_config()

-def is_cached_expired(start_time, cache_time):
+def is_cached_expired(start_time: datetime, cache_time: datetime):
     cache_expiration_in_hours = int(config.HDX_DATASETS_CSV_EXPIRATION_HOURS)
     if start_time - cache_time > datetime.timedelta(hours=cache_expiration_in_hours):
         return True
     else:
         return False

+# List of resource names to skip
+SKIP_RESOURCE_NAMES_LIST = ['QuickCharts', 'qc_data.csv']
+
+def contains_any_skip_resource(input_string: str, skip_list: List[str]):
+    """
+    Checks if the input string contains any item from the skip list.
+    Args:
+        input_string (str): The string to check.
+        skip_list (list): A list of substrings to look for.
+    Returns:
+        bool: True if any item from the skip list is found, False otherwise.
+    """
+    return any(skip_item in input_string for skip_item in skip_list)
+
 def process(dataset_id_list: Set[str], event: Dict):
     if dataset_id_list:
         # comment this line if you need to test local (without matching the dataset id to the list
         if event and 'dataset_id' in event and event.get('dataset_id') in dataset_id_list:
-            change_summary = get_change_summary(event)
-            _dataset_id = event.get('dataset_id').replace('-', '_')
-            data_dict = {
-                'event': event,
-                'change_summary': change_summary,
-                'unsubscribe_token_key': f'unsubscribe_token_{_dataset_id}',
-                'hdx_url': config.HDX_URL
-            }
-            push_notification_to_novu(data_dict)
+
+            if not contains_any_skip_resource(event.get('resource_name', ''), SKIP_RESOURCE_NAMES_LIST):
+                change_summary = get_change_summary(event)
+                _dataset_id = event.get('dataset_id').replace('-', '_')
+                data_dict = {
+                    'event': event,
+                    'change_summary': change_summary,
+                    'unsubscribe_token_key': f'unsubscribe_token_{_dataset_id}',
+                    'hdx_url': config.HDX_URL
+                }
+                push_notification_to_novu(data_dict)

     else:
         pass # dataset id list empty, pushing notification for every dataset?
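The net effect of the change above: a notification is now pushed only when the resource name does not mention any entry in SKIP_RESOURCE_NAMES_LIST. A quick illustration of the substring check (the resource names below are made-up examples, apart from the one reused in the tests):

    from processing.main import contains_any_skip_resource, SKIP_RESOURCE_NAMES_LIST

    contains_any_skip_resource('Testing QuickCharts file .csv', SKIP_RESOURCE_NAMES_LIST)  # True  -> notification skipped
    contains_any_skip_resource('qc_data.csv', SKIP_RESOURCE_NAMES_LIST)                    # True  -> notification skipped
    contains_any_skip_resource('admin_boundaries.csv', SKIP_RESOURCE_NAMES_LIST)           # False -> notification sent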
tests/test_notifications.py (29 changes: 21 additions & 8 deletions)
@@ -1,8 +1,8 @@
 import logging

+import mock
 from processing.helpers import get_change_summary
 from processing.datasets import get_dataset_id_list

+from processing.main import process as process

 logger = logging.getLogger(__name__)
@@ -33,22 +33,22 @@ def _generate_resource_created_test_event():
         },
         {
             'field': 'name',
-            'new_value': 'ING.Account.Statement_30-09-24_999911495277_USD.pdf',
-            'new_display_value': 'ING.Account.Statement_30-09-24_999911495277_USD.pdf',
+            'new_value': 'test.csv',
+            'new_display_value': 'test.csv',
             'old_value': None,
             'old_display_value': None
         },
         {
             'field': 'format',
-            'new_value': 'PDF',
-            'new_display_value': 'PDF',
+            'new_value': 'csv',
+            'new_display_value': 'csv',
             'old_value': None,
             'old_display_value': None
         },
         {
             'field': 'url',
-            'new_value': 'https://data.humdata.local/dataset/f679245a-5740-4ba6-a395-ec4e1ac20325/resource/b017f603-fb5e-4169-a7c5-88dbf202d368/download/ing.account.statement_30-09-24_999911495277_usd.pdf',
-            'new_display_value': 'https://data.humdata.local/dataset/f679245a-5740-4ba6-a395-ec4e1ac20325/resource/b017f603-fb5e-4169-a7c5-88dbf202d368/download/ing.account.statement_30-09-24_999911495277_usd.pdf',
+            'new_value': 'https://data.humdata.local/dataset/f679245a-5740-4ba6-a395-ec4e1ac20325/resource/b017f603-fb5e-4169-a7c5-88dbf202d368/download/test.csv',
+            'new_display_value': 'https://data.humdata.local/dataset/f679245a-5740-4ba6-a395-ec4e1ac20325/resource/b017f603-fb5e-4169-a7c5-88dbf202d368/download/test.csv',
             'old_value': None,
             'old_display_value': None
         }
@@ -72,3 +72,16 @@ def test_get_change_summary():
 def test_get_dataset_id_list():
     dataset_id_list = get_dataset_id_list()
     assert len(dataset_id_list) > 0
+
+@mock.patch('processing.main.push_notification_to_novu')
+def test_skip_values_not_skipping(push_notification_mock):
+    event_dict = _generate_resource_created_test_event()
+    process({'test-dataset-id'},event_dict)
+    assert push_notification_mock.call_count == 1
+
+@mock.patch('processing.main.push_notification_to_novu')
+def test_skip_values_skipping(push_notification_mock):
+    event_dict = _generate_resource_created_test_event()
+    event_dict['resource_name'] = 'Testing QuickCharts file .csv'
+    process({'test-dataset-id'}, event_dict)
+    assert push_notification_mock.call_count == 0
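A note for anyone extending these tests: because processing/main.py imports the function directly ("from processing.novu import push_notification_to_novu"), the mock must target the name as looked up in processing.main rather than where it is defined. A minimal sketch of the distinction (the test name and body here are illustrative):

    import mock

    # Works: replaces the reference that process() actually calls.
    @mock.patch('processing.main.push_notification_to_novu')
    def test_example(push_mock):
        ...

    # Patching 'processing.novu.push_notification_to_novu' instead would not intercept the call,
    # since main.py bound its own reference at import time.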
