Merge pull request #748 from asfadmin/test
Baseline speedup, requirements.txt updates
SpicyGarlicAlbacoreRoll authored Apr 25, 2024
2 parents 452918a + e2cdac4 commit 50998b4
Showing 10 changed files with 86 additions and 53 deletions.
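
In short: the baseline code swaps dateparser for the stricter but much faster ciso8601 parser, the jsonlite/jsonlite2 outputs gain s3Urls plus a NISAR-specific block, parse_cmr_response.py factors its URL filtering into shared helpers, and requirements.txt drops dateparser, adds ciso8601 and an explicit numpy pin, and bumps a number of other packages.
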
1 change: 0 additions & 1 deletion .github/workflows/reusable-DeployStack-SearchAPI.yml
@@ -101,7 +101,6 @@ jobs:
run: |
sudo apt-get update
sudo apt-get install libgdal-dev
export SKLEARN_ALLOW_DEPRECATED_SKLEARN_PACKAGE_INSTALL=True
python3 -m pip install --no-cache-dir --upgrade pip
python3 -m pip install --no-cache-dir wheel Cython
python3 -m pip install -r requirements.txt --use-deprecated=legacy-resolver
1 change: 0 additions & 1 deletion Dockerfile
@@ -20,7 +20,6 @@ WORKDIR "${LAMBDA_TASK_ROOT}/Discovery-SearchAPI"
COPY requirements.txt .
RUN mkdir "${LAMBDA_TASK_ROOT}/python-packages"
ENV PYTHONPATH "${PYTHONPATH}:${LAMBDA_TASK_ROOT}/python-packages"
ENV SKLEARN_ALLOW_DEPRECATED_SKLEARN_PACKAGE_INSTALL=True
RUN python3 -m pip install --no-cache-dir -r requirements.txt --target "${LAMBDA_TASK_ROOT}/python-packages"

## Copy required files (Already inside Discovery-SearchAPI dir):
13 changes: 6 additions & 7 deletions SearchAPI/Baseline/Calc.py
@@ -1,7 +1,6 @@
from math import sqrt, cos, sin, radians
import numpy as np
import dateparser

import ciso8601
# WGS84 constants
a = 6378137
f = pow((1.0 - 1 / 298.257224), 2)
@@ -15,17 +14,17 @@ def calculate_perpendicular_baselines(reference, stack):
product['noStateVectors'] = True
continue

asc_node_time = dateparser.parse(product['ascendingNodeTime']).timestamp()
asc_node_time = ciso8601.parse_datetime(product['ascendingNodeTime']).timestamp()

start = dateparser.parse(product['startTime']).timestamp()
end = dateparser.parse(product['stopTime']).timestamp()
start = ciso8601.parse_datetime(product['startTime']).timestamp()
end = ciso8601.parse_datetime(product['stopTime']).timestamp()
center = start + ((end - start) / 2)
product['relative_start_time'] = start - asc_node_time
product['relative_center_time'] = center - asc_node_time
product['relative_end_time'] = end - asc_node_time

t_pre = dateparser.parse(product['sv_t_pos_pre']).timestamp()
t_post = dateparser.parse(product['sv_t_pos_post']).timestamp()
t_pre = ciso8601.parse_datetime(product['sv_t_pos_pre']).timestamp()
t_post = ciso8601.parse_datetime(product['sv_t_pos_post']).timestamp()
product['relative_sv_pre_time'] = t_pre - asc_node_time
product['relative_sv_post_time'] = t_post - asc_node_time

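The timestamps parsed in Calc.py (ascendingNodeTime, startTime, stopTime, and the state-vector times) have to be ISO-8601 for ciso8601 to accept them, which is what makes the swap safe. A minimal sketch of the cost difference, using an assumed CMR-style timestamp rather than real repository data; timings vary by machine:

import timeit
import ciso8601
import dateparser  # requires both packages installed; this PR removes dateparser from requirements.txt

stamp = "2024-04-25T01:02:03.000000Z"  # assumed CMR-style ISO-8601 value

# Both calls return a datetime for this input; ciso8601 is a C parser that only
# accepts ISO-8601, while dateparser tries many formats and locales on every call.
print(dateparser.parse(stamp))
print(ciso8601.parse_datetime(stamp))

print(timeit.timeit(lambda: dateparser.parse(stamp), number=1_000))
print(timeit.timeit(lambda: ciso8601.parse_datetime(stamp), number=1_000))
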
6 changes: 3 additions & 3 deletions SearchAPI/Baseline/Stack.py
@@ -1,4 +1,4 @@
import dateparser
import ciso8601
from SearchAPI.CMR.Translate import translate_params, input_fixer
from SearchAPI.CMR.Query import CMRQuery
from .Calc import calculate_perpendicular_baselines
@@ -178,13 +178,13 @@ def get_default_product_type(reference):
def calculate_temporal_baselines(reference, stack):
for product in stack:
if product['granuleName'] == reference:
reference_start = dateparser.parse(product['startTime'])
reference_start = ciso8601.parse_datetime(product['startTime'])
break
for product in stack:
if product['granuleName'] == reference:
product['temporalBaseline'] = 0
else:
start = dateparser.parse(product['startTime'])
start = ciso8601.parse_datetime(product['startTime'])
product['temporalBaseline'] = (start.date() - reference_start.date()).days
return stack

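The behaviour of calculate_temporal_baselines is unchanged, only the parser is: the temporal baseline is still the whole-day difference between acquisition dates. A self-contained illustration with invented granules (not from the repository's test data):

import ciso8601

reference = {'granuleName': 'REF', 'startTime': '2024-01-01T00:00:00Z'}
secondary = {'granuleName': 'SEC', 'startTime': '2024-01-13T06:00:00Z'}

ref_start = ciso8601.parse_datetime(reference['startTime'])
sec_start = ciso8601.parse_datetime(secondary['startTime'])

# Same arithmetic as the loop above: difference in calendar days, ignoring time of day
print((sec_start.date() - ref_start.date()).days)  # 12
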
9 changes: 8 additions & 1 deletion SearchAPI/CMR/Output/jsonlite.py
@@ -42,7 +42,8 @@ def req_fields_jsonlite():
'subswath',
'pgeVersion',
'operaBurstID',
'additionalUrls'
'additionalUrls',
's3Urls'
]
return fields

@@ -186,4 +187,10 @@ def getItem(self, p):
if p.get('validityStartDate'):
result['opera']['validityStartDate'] = p.get('validityStartDate')

if p.get('platform') == 'NISAR':
result['nisar'] = {
'additionalUrls': p.get('additionalUrls', []),
's3Urls': p.get('s3Urls', [])
}

return result
3 changes: 3 additions & 0 deletions SearchAPI/CMR/Output/jsonlite2.py
@@ -61,4 +61,7 @@ def getItem(self, p):
if p.get('opera') is not None:
result['s1o'] = p['opera']

if p.get('nisar') is not None:
result['nsr'] = p['nisar']

return result
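
Putting the two output changes together: jsonlite emits a 'nisar' block for NISAR products, and jsonlite2 carries it through under the shorter 'nsr' key, mirroring how 'opera' becomes 's1o'. A sketch with invented values, not real granule metadata:

# Hypothetical jsonlite entry for a NISAR product (values are illustrative only)
jsonlite_item = {
    'granuleName': 'NISAR_EXAMPLE_GRANULE',
    'nisar': {
        'additionalUrls': ['https://example.test/granule.h5'],
        's3Urls': ['s3://example-bucket/granule.h5'],
    },
}

# jsonlite2 renames the block
jsonlite2_item = {}
if jsonlite_item.get('nisar') is not None:
    jsonlite2_item['nsr'] = jsonlite_item['nisar']

print(jsonlite2_item['nsr']['s3Urls'])  # ['s3://example-bucket/granule.h5']
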
1 change: 1 addition & 0 deletions SearchAPI/CMR/Translate/fields.py
@@ -61,6 +61,7 @@ def get_field_paths():
'track': attr_path('PATH_NUMBER'),
'pgeVersion': "./PGEVersionClass/PGEVersion",
'additionalUrls': "./OnlineAccessURLs",
's3Urls': "./OnlineAccessURLs",

# BURST FIELDS
'absoluteBurstID': attr_path('BURST_ID_ABSOLUTE'),
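
Note that 'additionalUrls' and 's3Urls' both resolve the same ./OnlineAccessURLs element; the actual split between HTTP(S) and S3 locations happens later, in the get_http_urls()/get_s3_urls() helpers added to parse_cmr_response.py below.
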
30 changes: 26 additions & 4 deletions SearchAPI/CMR/Translate/parse_cmr_response.py
@@ -205,19 +205,41 @@ def float_or_none(a):
result['downloadUrl'] = urls[0]
result['fileName'] = result['granuleName'] + '.' + urls[0].split('.')[-1]



def get_all_urls():
accessPath = './OnlineAccessURLs/OnlineAccessURL/URL'
resourcesPath = './OnlineResources/OnlineResource/URL'

access_urls = get_all_vals(accessPath)
if access_urls is None:
access_urls = []

resource_urls = get_all_vals(resourcesPath)
if resource_urls is None:
resource_urls = []

return list(set([*access_urls, *resource_urls]))

def get_http_urls():
return [url for url in get_all_urls() if not url.endswith('.md5') and not url.startswith('s3://') and not 's3credentials' in url]

def get_s3_urls():
return [url for url in get_all_urls() if not url.endswith('.md5') and (url.startswith('s3://') or 's3credentials' in url)]

if result.get('product_file_id', '').startswith('OPERA'):
result['beamMode'] = get_val(attr_path('BEAM_MODE'))
accessUrls = [url for url in get_all_vals('./OnlineAccessURLs/OnlineAccessURL/URL') if not url.endswith('.md5') and not url.startswith('s3://') and not 's3credentials' in url]
OnlineResources = [url for url in get_all_vals('./OnlineResources/OnlineResource/URL') if not url.endswith('.md5') and not url.startswith('s3://') and not 's3credentials' in url]
result['additionalUrls'] = list(set([*accessUrls, *OnlineResources]))
result['additionalUrls'] = get_http_urls()
result['configurationName'] = "Interferometric Wide. 250 km swath, 5 m x 20 m spatial resolution and burst synchronization for interferometry. IW is considered to be the standard mode over land masses."

if (providerbrowseUrls := get_all_vals('./AssociatedBrowseImageUrls/ProviderBrowseUrl/URL')):
result['browse'] = [url for url in providerbrowseUrls if not url.startswith('s3://')]

if 'STATIC' in result['processingLevel']:
result['validityStartDate'] = get_val('./Temporal/SingleDateTime')

if result.get('platform', '') == 'NISAR':
result['additionalUrls'] = get_http_urls()
result['s3Urls'] = get_s3_urls()
return result


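The OPERA-specific list comprehensions are now shared helpers, and NISAR products get both flavours of URL. Re-creating the filtering standalone on an invented URL list (not taken from CMR) shows the split:

urls = [
    'https://example.test/granule.h5',
    'https://example.test/granule.h5.md5',   # checksum files are dropped from both lists
    's3://example-bucket/granule.h5',
    'https://example.test/s3credentials',
]

http_urls = [u for u in urls
             if not u.endswith('.md5') and not u.startswith('s3://') and 's3credentials' not in u]
s3_urls = [u for u in urls
           if not u.endswith('.md5') and (u.startswith('s3://') or 's3credentials' in u)]

print(http_urls)  # ['https://example.test/granule.h5']
print(s3_urls)    # ['s3://example-bucket/granule.h5', 'https://example.test/s3credentials']
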
67 changes: 35 additions & 32 deletions requirements.txt
@@ -3,79 +3,79 @@ argcomplete==1.12.3
asn1crypto==1.4.0
atomicwrites==1.4.0
attrs==21.2.0
backports.zoneinfo==0.2.1;python_version<"3.9" # https://stackoverflow.com/questions/71712258/error-could-not-build-wheels-for-backports-zoneinfo-which-is-required-to-insta
blinker==1.7.0
boto3==1.19.0
botocore==1.22.0
Brotli==1.0.9
certifi==2021.10.8
certifi==2023.7.22
cffi==1.15.0
cfn-flip==1.3.0
chardet==4.0.0
charset-normalizer==2.0.7
click==7.1.2
ciso8601==2.3.1
click==8.1.7
click-plugins==1.1.1
cligj==0.7.2
coverage==6.0.2
# cryptography==3.4.7 # Version 35.0.0 breaks zappa=0.52.0 deployments. (yes, their versions went from 3.4.7, 3.4.8, then 35.0.0)
dateparser==1.1.0
DateTime==4.3
defusedxml==0.7.1
Deprecated==1.2.13
docutils==0.17.1
Dumper==1.2.0
durationpy==0.5
exceptiongroup==1.2.1
execnet==1.9.0
Fiona==1.8.20
Flask==2.0.2
fiona==1.9.6
Flask==2.3.2
Flask-Compress==1.10.1
Flask-Cors==3.0.10
flask-lambda-python36==0.1.0
flask-talisman==0.8.1
future==0.18.2
geojson==2.5.0
geomet==0.3.0
geopandas==0.10.0
gitdb==4.0.7
gitdb2==4.0.2
GitPython==3.1.24
gunicorn==20.1.0
GitPython==3.1.41
gunicorn==22.0.0
hjson==3.0.2
hypothesis==6.37.0
idna==3.3
importlib-metadata==4.8.1
iniconfig==1.1.1
itsdangerous==2.0.1
Jinja2==3.0.2
# iniconfig==1.2.1
itsdangerous==2.2.0
Jinja2==3.1.3
jmespath==0.10.0
joblib==1.1.0
joblib==1.2.0
kappa==0.6.0
kml2geojson==4.0.2
lambda-packages==0.20.0
libpagure==0.22
lxml==4.7.1
MarkupSafe==2.0.1
lxml==5.2.1
MarkupSafe==2.1.5
more-itertools==8.10.0
munch==2.5.0
packaging==21.0
numpy==1.22.4
packaging==24.0
pandas==1.3.4
pathlib2==2.3.6
pep517==0.12.0
pexpect==4.8.0
pip-tools==6.4.0
placebo==0.10.0
pluggy==1.0.0
pluggy==1.5.0
ptyprocess==0.7.0
py==1.10.0
pycparser==2.20
PyGithub==1.55
PyJWT==2.3.0
PyJWT==2.4.0
pykml==0.2.0
# PyNaCl==1.4.0 # breaks zappa 0.52.0 (didn't check earlier versions, not sure if we need this)
# pyOpenSSL==21.0.0 # (Requires cryptography, which makes zappa throw)
# PyNaCl==1.5.0
pyparsing==2.4.7
pyproj==3.6.0
pyshp==2.1.3
pytest==6.2.5
pytest-automation==1.1.2
pytest==8.1.1
pytest-automation==3.0.0
pytest-cov==3.0.0
pytest-forked==1.3.0
pytest-xdist==2.4.0
@@ -84,28 +84,31 @@ python-gitlab==2.10.1
python-slugify==5.0.2
pytz==2021.3
pytz-deprecation-shim==0.1.0.post0
PyYAML==6.0
PyYAML==6.0.1
regex==2021.10.8
requests==2.26.0
requests-toolbelt==0.9.1
responses==0.18.0
s3transfer==0.5.0
scandir==1.10.0
scikit-learn==1.1.3 # WARNING: 0.24.1 breaks ShorelineMask26 test
scikit-learn==1.1.3
scipy==1.13.0
serverless-wsgi==3.0.0
Shapely==1.7.1
six==1.16.0
# sklearn==0.0.post5
smmap==4.0.0
sortedcontainers==2.4.0
text-unidecode==1.3
threadpoolctl==3.4.0
toml==0.10.2
tomli==1.2.1
tomli==2.0.1
typing-extensions==3.10.0.2
tzdata==2021.4
tzlocal==2.0.0 # tzlocal.get_localzone() changed it's return type after this (No 'localize' attr)
tzlocal==2.0.0
urllib3==1.26.7
Werkzeug==2.0.2
WKTUtils==1.1.6
wrapt==1.13.2
Werkzeug==2.3.3
WKTUtils==2.0.0
wrapt==1.16.0
zipp==3.6.0
zope.interface==4.7.2
numpy==1.21.3
zope.interface==4.7.2
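
The requirements changes follow the code: dateparser goes away, ciso8601==2.3.1 comes in, numpy gets an explicit 1.22.4 pin in place of the trailing 1.21.3, and several packages (certifi, GitPython, gunicorn, Jinja2, PyYAML, Werkzeug, and others) are bumped. The commented-out sklearn==0.0.post5 line is presumably also why the SKLEARN_ALLOW_DEPRECATED_SKLEARN_PACKAGE_INSTALL export could be dropped from the workflow and Dockerfile, since that variable exists only to allow installing the deprecated sklearn meta-package.
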
8 changes: 4 additions & 4 deletions yml_tests/test_URLs.yml
@@ -2472,8 +2472,8 @@ tests:
expected file: csv
expected code: 200

- start yesterday count:
start: yesterday
- start 3 days ago count:
start: 3 days ago
output: count

expected file: count
@@ -2486,8 +2486,8 @@ tests:
expected file: count
expected code: 200

- start 1dayago count:
start: 1+day+ago
- start 3daysago count:
start: 3+days+ago
output: count

expected file: count
