From b10cea47b0f494d761fa37e6e843cc8f6e29e12e Mon Sep 17 00:00:00 2001
From: Mohammed Karim
Date: Sat, 4 May 2019 11:21:13 +0000
Subject: [PATCH 1/8] NED fix for missing tiles

---
 conf/dataset_versions.json        | 2 +-
 interferogram/sentinel/ned_dem.py | 8 ++++----
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/conf/dataset_versions.json b/conf/dataset_versions.json
index d8604d1..07d0fa3 100644
--- a/conf/dataset_versions.json
+++ b/conf/dataset_versions.json
@@ -3,7 +3,7 @@
   "S1-IW_SLC_SWATH": "v1.1",
   "S1-IFG": "v2.0.0",
   "S1-IFG-STITCHED": "v2.0.0",
-  "S1-GUNW": "v2.0.1",
+  "S1-GUNW": "v2.0.2",
   "S1-GUNW-MERGED": "v2.0.0",
   "S1-GUNW-MERGED-STITCHED": "v2.0.0",
   "S1-VALIDATED_IFG_STACK": "v1.0",
diff --git a/interferogram/sentinel/ned_dem.py b/interferogram/sentinel/ned_dem.py
index 950fdad..13f79e6 100755
--- a/interferogram/sentinel/ned_dem.py
+++ b/interferogram/sentinel/ned_dem.py
@@ -204,7 +204,7 @@ def download(url_list, username, password):
     return dem_files


-def stitch(dem_files, downsample=None):
+def stitch(bbox, dem_files, downsample=None):
     """Stitch NED1/NED13 dems."""

     # unzip dem zip files
@@ -219,8 +219,8 @@ def stitch(dem_files, downsample=None):
     check_call("gdalbuildvrt combinedDEM.vrt *.hgt", shell=True)
     if downsample is None: outsize_opt = ""
     else: outsize_opt = "-outsize {} {}".format(downsample, downsample)
-    check_call("gdal_translate -of ENVI {} combinedDEM.vrt stitched.dem".format(outsize_opt), shell=True)
-
+    #check_call("gdal_translate -of ENVI {} -projwin {} {} {} {} combinedDEM.vrt stitched.dem".format(outsize_opt, bbox[2], bbox[0], bbox[3], bbox[1]), shell=True)
+    check_call("gdalwarp combinedDEM.vrt -te {} {} {} {} -of ENVI {} stitched.dem".format(bbox[2], bbox[0], bbox[3], bbox[1], outsize_opt), shell=True)
     #update data to fill extreme values with default value (-32768). First create a new dem file with the update
     #check_call('gdal_calc.py -A stitched.dem --outfile=stitched_new.dem --calc="-32768*(A<-32768)+A*(A>=-32768)"', shell=True)
     check_call('gdal_calc.py --format=ENVI -A stitched.dem --outfile=stitchedFix.dem --calc="A*(A>-1000)" --NoDataValue=0', shell=True)
@@ -290,7 +290,7 @@ def main(url_base, username, password, action, bbox, downsample):

     # stitch
     if action == 'stitch':
-        stitched_dem = stitch(dem_files, downsample)
+        stitched_dem = stitch(bbox, dem_files, downsample)
         logger.info("stitched_dem: {}".format(stitched_dem))


From f066e334a87aee4d24021c0d7ff99230604df3e5 Mon Sep 17 00:00:00 2001
From: Mohammed Karim
Date: Mon, 6 May 2019 16:54:13 +0000
Subject: [PATCH 2/8] revert back +-4

---
 interferogram/sentinel/create_standard_product_s1.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/interferogram/sentinel/create_standard_product_s1.py b/interferogram/sentinel/create_standard_product_s1.py
index 3e85be3..6f24a14 100644
--- a/interferogram/sentinel/create_standard_product_s1.py
+++ b/interferogram/sentinel/create_standard_product_s1.py
@@ -992,11 +992,11 @@ def main():
     if dem_type == "NED13-downsampled": downsample_option = "-d 33%"
     else: downsample_option = ""
-
-    dem_S = dem_S - 4 if dem_S > -86 else dem_S
-    dem_N = dem_N + 4 if dem_N < 86 else dem_N
-    dem_W = dem_W - 4 if dem_W > -176 else dem_W
-    dem_E = dem_E + 4 if dem_E < 176 else dem_E
+
+    dem_S = dem_S - 1 if dem_S > -89 else dem_S
+    dem_N = dem_N + 1 if dem_N < 89 else dem_N
+    dem_W = dem_W - 1 if dem_W > -179 else dem_W
+    dem_E = dem_E + 1 if dem_E < 179 else dem_E
     '''
     dem_S, dem_N, dem_W, dem_E = bbox
     dem_S = int(math.floor(dem_S))

From 82284c0b490e343c8dbf7d545ac290809a8106ee Mon Sep 17 00:00:00 2001
From: Mohammed Karim
Date: Thu, 9 May 2019 01:03:43 +0000
Subject: [PATCH 3/8] added PICKLE and fine_interferogram with merged

---
 conf/dataset_versions.json                            | 2 +-
 interferogram/sentinel/create_standard_product_s1.py  | 5 +++++
 2 files changed, 6 insertions(+), 1 deletion(-)

diff --git a/conf/dataset_versions.json b/conf/dataset_versions.json
index 07d0fa3..2c669c8 100644
--- a/conf/dataset_versions.json
+++ b/conf/dataset_versions.json
@@ -4,7 +4,7 @@
   "S1-IFG": "v2.0.0",
   "S1-IFG-STITCHED": "v2.0.0",
   "S1-GUNW": "v2.0.2",
-  "S1-GUNW-MERGED": "v2.0.0",
+  "S1-GUNW-MERGED": "v2.0.2",
   "S1-GUNW-MERGED-STITCHED": "v2.0.0",
   "S1-VALIDATED_IFG_STACK": "v1.0",
   "S1-VALIDATED_TS_STACK": "v1.0",
diff --git a/interferogram/sentinel/create_standard_product_s1.py b/interferogram/sentinel/create_standard_product_s1.py
index 6f24a14..d81302b 100644
--- a/interferogram/sentinel/create_standard_product_s1.py
+++ b/interferogram/sentinel/create_standard_product_s1.py
@@ -1617,11 +1617,16 @@ def main():

     #copy files to merged directory

+    pickle_dir = "{}/PICKLE".format(prod_dir)
+    fine_interferogram_xml = "{}/fine_interferogram/IW1.xml".format(prod_dir)
+
     met_file_merged = os.path.join(prod_dir_merged, "{}.met.json".format(ifg_id_merged))
     ds_file_merged = os.path.join(prod_dir_merged, "{}.dataset.json".format(ifg_id_merged))
     shutil.copy(ds_file, ds_file_merged)
     shutil.copy(met_file, met_file_merged)
     shutil.copytree("merged", os.path.join(prod_dir_merged, "merged"))
+    shutil.copytree(pickle_dir, os.path.join(prod_dir_merged, "PICKLE"))
+    shutil.copy(fine_interferogram_xml, os.path.join(prod_dir_merged, "fine_interferogram.xml"))
     #shutil.copytree(tiles_dir, os.path.join(prod_dir_merged, "tiles"))

     #logger.info( json.dump(md, f, indent=2))
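PATCH 1/8 changes stitch() to take the bounding box and to crop the combined NED mosaic to that extent with gdalwarp -te, rather than converting the whole mosaic with gdal_translate. A minimal, standalone sketch of that cropping step, assuming bbox is ordered [S, N, W, E] as elsewhere in ned_dem.py and that the .hgt tiles are already unzipped into the working directory (the helper name crop_stitched_dem is illustrative only):

    # Sketch only: crop the combined NED mosaic to bbox = [S, N, W, E].
    from subprocess import check_call

    def crop_stitched_dem(bbox):
        # virtual mosaic over every unzipped NED tile in the working directory
        check_call("gdalbuildvrt combinedDEM.vrt *.hgt", shell=True)
        # gdalwarp -te takes xmin ymin xmax ymax, i.e. W S E N
        check_call("gdalwarp combinedDEM.vrt -te {} {} {} {} -of ENVI stitched.dem".format(
            bbox[2], bbox[0], bbox[3], bbox[1]), shell=True)

Note that gdalwarp sizes its output with -ts/-tr rather than gdal_translate's -outsize, so the outsize_opt string built for the downsample case may need the equivalent gdalwarp option when it is non-empty.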
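PATCH 2/8 narrows the DEM bounding-box pad from four degrees to one degree per side, with guards so the padded box never runs past the poles or the +/-180 meridian. A small sketch of that padding logic, assuming degrees with S/N latitudes and W/E longitudes; the helper name pad_bbox is illustrative, not part of create_standard_product_s1.py:

    def pad_bbox(dem_S, dem_N, dem_W, dem_E, pad=1):
        # Grow the box by `pad` degrees per side, but only while the result
        # stays inside [-90, 90] latitude and [-180, 180] longitude.
        dem_S = dem_S - pad if dem_S > -90 + pad else dem_S
        dem_N = dem_N + pad if dem_N < 90 - pad else dem_N
        dem_W = dem_W - pad if dem_W > -180 + pad else dem_W
        dem_E = dem_E + pad if dem_E < 180 - pad else dem_E
        return dem_S, dem_N, dem_W, dem_E

With pad=1 this reproduces the -89/89/-179/179 thresholds in the patch; with pad=4 it reproduces the earlier -86/86/-176/176 values.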
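PATCH 3/8 starts carrying the ISCE PICKLE directory and the IW1 fine_interferogram XML into the S1-GUNW-MERGED product directory alongside the merged rasters; PATCH 4/8, which follows, switches those two sources to paths relative to the working directory. A condensed sketch of the copy step in that final relative-path form, assuming the job runs from the directory containing PICKLE/, merged/ and fine_interferogram/ and that ds_file and met_file already exist (the helper name copy_merged_outputs is illustrative):

    import os
    import shutil

    def copy_merged_outputs(prod_dir_merged, ds_file, met_file, ifg_id_merged):
        # dataset and metadata JSON for the merged product
        shutil.copy(ds_file, os.path.join(prod_dir_merged, "{}.dataset.json".format(ifg_id_merged)))
        shutil.copy(met_file, os.path.join(prod_dir_merged, "{}.met.json".format(ifg_id_merged)))
        # merged rasters, ISCE state, and the IW1 interferogram description
        shutil.copytree("merged", os.path.join(prod_dir_merged, "merged"))
        shutil.copytree("PICKLE", os.path.join(prod_dir_merged, "PICKLE"))
        shutil.copy("fine_interferogram/IW1.xml",
                    os.path.join(prod_dir_merged, "fine_interferogram.xml"))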
From 34944402451e27e14e16e79167858673cf7dd32b Mon Sep 17 00:00:00 2001
From: Mohammed Karim
Date: Thu, 9 May 2019 03:15:14 +0000
Subject: [PATCH 4/8] update

---
 interferogram/sentinel/create_standard_product_s1.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/interferogram/sentinel/create_standard_product_s1.py b/interferogram/sentinel/create_standard_product_s1.py
index d81302b..feeaa70 100644
--- a/interferogram/sentinel/create_standard_product_s1.py
+++ b/interferogram/sentinel/create_standard_product_s1.py
@@ -1618,14 +1618,14 @@ def main():
     #copy files to merged directory

     pickle_dir = "{}/PICKLE".format(prod_dir)
-    fine_interferogram_xml = "{}/fine_interferogram/IW1.xml".format(prod_dir)
+    fine_interferogram_xml = "fine_interferogram/IW1.xml"

     met_file_merged = os.path.join(prod_dir_merged, "{}.met.json".format(ifg_id_merged))
     ds_file_merged = os.path.join(prod_dir_merged, "{}.dataset.json".format(ifg_id_merged))
     shutil.copy(ds_file, ds_file_merged)
     shutil.copy(met_file, met_file_merged)
     shutil.copytree("merged", os.path.join(prod_dir_merged, "merged"))
-    shutil.copytree(pickle_dir, os.path.join(prod_dir_merged, "PICKLE"))
+    shutil.copytree("PICKLE", os.path.join(prod_dir_merged, "PICKLE"))
     shutil.copy(fine_interferogram_xml, os.path.join(prod_dir_merged, "fine_interferogram.xml"))
     #shutil.copytree(tiles_dir, os.path.join(prod_dir_merged, "tiles"))

From fe4925f7c1f96e86f7bf88a754b9698d64a5e9e5 Mon Sep 17 00:00:00 2001
From: Mohammed Karim
Date: Fri, 10 May 2019 17:15:18 +0000
Subject: [PATCH 5/8] grq_v2.0.1_s1-gunw

---
 interferogram/sentinel/create_standard_product_s1.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/interferogram/sentinel/create_standard_product_s1.py b/interferogram/sentinel/create_standard_product_s1.py
index feeaa70..0916c7a 100644
--- a/interferogram/sentinel/create_standard_product_s1.py
+++ b/interferogram/sentinel/create_standard_product_s1.py
@@ -109,13 +109,12 @@ def check_ifg_status(ifg_id):
     logger.info("check_slc_status : returning False")
     return False

-def get_dataset_by_hash(ifg_hash):
+def get_dataset_by_hash(ifg_hash, es_index="grq"):
     """Query for existence of dataset by ID."""

     uu = UrlUtils()
     es_url = uu.rest_url
     #es_index = "{}_{}_s1-ifg".format(uu.grq_index_prefix, version)
-    es_index = "grq"

     # query
     query = {
@@ -151,6 +150,7 @@ def get_dataset_by_hash(ifg_hash):


 def check_ifg_status_by_hash(new_ifg_hash):
+    es_index="grq_*_s1-gunw",
     result = get_dataset_by_hash(new_ifg_hash)
     total = result['hits']['total']
     logger.info("check_slc_status_by_hash : total : %s" %total)

From 8de33d61ea80f1592d022be0efa1af960bd53b84 Mon Sep 17 00:00:00 2001
From: Mohammed Karim
Date: Fri, 10 May 2019 17:16:08 +0000
Subject: [PATCH 6/8] grq_v2.0.1_s1-gunw

---
 interferogram/sentinel/create_standard_product_s1.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/interferogram/sentinel/create_standard_product_s1.py b/interferogram/sentinel/create_standard_product_s1.py
index 0916c7a..9554993 100644
--- a/interferogram/sentinel/create_standard_product_s1.py
+++ b/interferogram/sentinel/create_standard_product_s1.py
@@ -150,7 +150,7 @@ def get_dataset_by_hash(ifg_hash, es_index="grq"):


 def check_ifg_status_by_hash(new_ifg_hash):
-    es_index="grq_*_s1-gunw",
+    es_index="grq_*_s1-gunw"
     result = get_dataset_by_hash(new_ifg_hash)
     total = result['hits']['total']
     logger.info("check_slc_status_by_hash : total : %s" %total)
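PATCHES 5/8 and 6/8 turn the hard-coded "grq" alias in get_dataset_by_hash into an es_index parameter and pin check_ifg_status_by_hash to the S1-GUNW indices ("grq_*_s1-gunw"); PATCH 6/8 also drops a stray trailing comma that would have bound es_index to a one-element tuple. The two patches that follow pass the index through and tighten the query. A minimal sketch of the resulting lookup, with the UrlUtils/REST plumbing stubbed into an es_url argument as an assumption and the query trimmed to the hash term:

    import json
    import requests

    def get_dataset_by_hash(ifg_hash, es_url, es_index="grq"):
        # Search the given GRQ index for a dataset with this full_id_hash.
        query = {
            "query": {
                "bool": {
                    "must": [
                        {"term": {"metadata.full_id_hash.raw": ifg_hash}}
                    ]
                }
            }
        }
        r = requests.post("{}/{}/_search?size=1".format(es_url, es_index),
                          data=json.dumps(query),
                          headers={"Content-Type": "application/json"})
        r.raise_for_status()
        return r.json()

    def check_ifg_status_by_hash(new_ifg_hash, es_url):
        es_index = "grq_*_s1-gunw"   # no trailing comma: a comma here would make this a tuple
        result = get_dataset_by_hash(new_ifg_hash, es_url, es_index)
        return result['hits']['total'] > 0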
From 1d40eaf1a603fa3013c788e69d40200bb9dfa370 Mon Sep 17 00:00:00 2001
From: Mohammed Karim
Date: Fri, 10 May 2019 17:21:22 +0000
Subject: [PATCH 7/8] grq_v2.0.1_s1-gunw

---
 interferogram/sentinel/create_standard_product_s1.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/interferogram/sentinel/create_standard_product_s1.py b/interferogram/sentinel/create_standard_product_s1.py
index 9554993..1ebfc9d 100644
--- a/interferogram/sentinel/create_standard_product_s1.py
+++ b/interferogram/sentinel/create_standard_product_s1.py
@@ -151,7 +151,7 @@ def get_dataset_by_hash(ifg_hash, es_index="grq"):

 def check_ifg_status_by_hash(new_ifg_hash):
     es_index="grq_*_s1-gunw"
-    result = get_dataset_by_hash(new_ifg_hash)
+    result = get_dataset_by_hash(new_ifg_hash, es_index)
     total = result['hits']['total']
     logger.info("check_slc_status_by_hash : total : %s" %total)
     if total>0:

From e1ddc2eadfe5bd3b6ffe30bc49a206b71374cb46 Mon Sep 17 00:00:00 2001
From: Mohammed Karim
Date: Fri, 10 May 2019 17:39:19 +0000
Subject: [PATCH 8/8] update

---
 interferogram/sentinel/create_standard_product_s1.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/interferogram/sentinel/create_standard_product_s1.py b/interferogram/sentinel/create_standard_product_s1.py
index 1ebfc9d..b125bf8 100644
--- a/interferogram/sentinel/create_standard_product_s1.py
+++ b/interferogram/sentinel/create_standard_product_s1.py
@@ -122,6 +122,7 @@ def get_dataset_by_hash(ifg_hash, es_index="grq"):
             "bool":{
                 "must":[
                     { "term":{"metadata.full_id_hash.raw": ifg_hash} },
+                    { "term":{"dataset.raw": "S1-GUNW"} }
                 ]
             }
         }
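PATCH 8/8 adds a second term to the bool/must clause so the hash lookup only counts existing S1-GUNW products rather than any dataset type that happens to share the same full_id_hash. The resulting query body, shown standalone for reference (field names as they appear in the patch):

    query = {
        "query": {
            "bool": {
                "must": [
                    {"term": {"metadata.full_id_hash.raw": ifg_hash}},
                    {"term": {"dataset.raw": "S1-GUNW"}}
                ]
            }
        }
    }

Both terms must match, so a product of another dataset type carrying the same hash should no longer satisfy the duplicate check.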