From db7ee70caa18742e439b4208ce07ee746a8313f6 Mon Sep 17 00:00:00 2001
From: Mohammed Karim
Date: Wed, 2 Oct 2019 17:56:32 +0000
Subject: [PATCH 1/5] update

---
 .../sentinel/create_standard_product_s1.py | 20 +++++++++++++++++--
 1 file changed, 18 insertions(+), 2 deletions(-)

diff --git a/interferogram/sentinel/create_standard_product_s1.py b/interferogram/sentinel/create_standard_product_s1.py
index 7dc3a9f..4f85d3e 100644
--- a/interferogram/sentinel/create_standard_product_s1.py
+++ b/interferogram/sentinel/create_standard_product_s1.py
@@ -610,6 +610,22 @@ def get_polarization2(id):
     elif pp == "SH": return "hh"
     else: raise RuntimeError("Unrecognized polarization: %s" % pp)
 
+def get_pol_data_from_slcs(slcs):
+    pol_data = []
+    for slc in slcs:
+        pol = get_polarization(slc).strip().lower()
+        logger.info("get_pol_data_from_slcs: pol data of SLC : {} is {}".format(slc, pol))
+        if pol not in pol_data:
+            pol_data.append(pol)
+
+    if len(pol_data)==0 or len(pol_data)>1:
+        err_msg = "get_pol_data_from_slcs: Found Multiple Polarization or No Polarization for slcs {} : {}".format(slcs, pol_data)
+        print(err_msg)
+        raise RuntimeError(err_msg)
+
+    return pol_data[0]
+
+
 def get_polarization(id):
     """Return polarization."""
@@ -941,8 +957,8 @@ def main():
             slave_safe_dirs.append(i.replace(".zip", ".SAFE"))
 
     # get polarization values
-    master_pol = get_polarization(master_safe_dirs[0])
-    slave_pol = get_polarization(slave_safe_dirs[0])
+    master_pol = get_pol_data_from_slcs(master_safe_dirs)
+    slave_pol = get_pol_data_from_slcs(slave_safe_dirs)
     if master_pol == slave_pol:
         match_pol = master_pol
     else:
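Note: patch 1 replaces a polarization lookup on only the first SLC with get_pol_data_from_slcs, which scans every SLC in the list, collects the distinct polarizations, and errors out unless exactly one remains. Below is a minimal standalone sketch of that contract, with get_polarization stubbed out; the stub's SDV-to-vv mapping is an assumption for illustration, since the real helper parses the SAFE product name.

```python
# Sketch of the get_pol_data_from_slcs contract (not the production code).

def get_polarization(slc_id):
    # Hypothetical stub: the module's real helper derives the polarization
    # from the SAFE name (assumed here: dual-pol "SDV" products map to "vv").
    return "vv" if "SDV" in slc_id else "hh"

def get_pol_data_from_slcs(slcs):
    pol_data = []
    for slc in slcs:
        pol = get_polarization(slc).strip().lower()
        if pol not in pol_data:
            pol_data.append(pol)
    # exactly one distinct polarization must cover the whole stack
    if len(pol_data) != 1:
        raise RuntimeError("Multiple or no polarizations for {}: {}".format(slcs, pol_data))
    return pol_data[0]

print(get_pol_data_from_slcs(["S1A_IW_SLC__1SDV_a.SAFE", "S1B_IW_SLC__1SDV_b.SAFE"]))  # vv
# A mixed list (one SDV SLC, one SSH SLC) now raises, where the old code
# would have silently used the first SLC's polarization for the whole pair.
```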
From dc7df11b9041fd74ab6681594cc56e013e446866 Mon Sep 17 00:00:00 2001
From: torresal
Date: Mon, 21 Oct 2019 10:10:03 -0700
Subject: [PATCH 2/5] Stop production of S1-GUNW-MERGED

Comment out anything related to creation and copying of files to
'prod_dir_merged'.

Lines: 1343, 1346, 1349, 1411, 1729-1759
---
 interferogram/sentinel/create_standard_product_s1.py | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/interferogram/sentinel/create_standard_product_s1.py b/interferogram/sentinel/create_standard_product_s1.py
index 4f85d3e..7fa36f2 100644
--- a/interferogram/sentinel/create_standard_product_s1.py
+++ b/interferogram/sentinel/create_standard_product_s1.py
@@ -1340,13 +1340,13 @@ def main():
     logger.info("ifg_id_merged : %s" %ifg_id_merged)
 
     prod_dir = id
-    prod_dir_merged = ifg_id_merged
+    #prod_dir_merged = ifg_id_merged
 
     logger.info("prod_dir : %s" %prod_dir)
-    logger.info("prod_dir_merged : %s" %prod_dir_merged)
+    #logger.info("prod_dir_merged : %s" %prod_dir_merged)
 
     os.makedirs(prod_dir, 0o755)
-    os.makedirs(prod_dir_merged, 0o755)
+    #os.makedirs(prod_dir_merged, 0o755)
 
     # make metadata geocube
     os.chdir("merged")
@@ -1408,7 +1408,7 @@ def main():
 
     # save other files to product directory
     shutil.copyfile("_context.json", os.path.join(prod_dir,"{}.context.json".format(id)))
-    shutil.copyfile("_context.json", os.path.join(prod_dir_merged,"{}.context.json".format(ifg_id_merged)))
+    #shutil.copyfile("_context.json", os.path.join(prod_dir_merged,"{}.context.json".format(ifg_id_merged)))
 
     fine_int_xmls = []
     for swathnum in swath_list:
@@ -1725,7 +1725,7 @@ def main():
     #copy files to merged directory
     pickle_dir = "{}/PICKLE".format(prod_dir)
     fine_interferogram_xml = "fine_interferogram/IW1.xml"
-
+    '''
     met_file_merged = os.path.join(prod_dir_merged, "{}.met.json".format(ifg_id_merged))
     ds_file_merged = os.path.join(prod_dir_merged, "{}.dataset.json".format(ifg_id_merged))
     shutil.copy(ds_file, ds_file_merged)
@@ -1746,6 +1746,7 @@ def main():
         except Exception as err:
             logger.info(str(err))
     '''
+    '''
     for f in os.listdir("merged"):
         if f.endswith(".vrt"):
             src = os.path.join(os.getcwd(), "merged", f)

From 46cc4b82382e68f36344e0a94202504cd470e7d4 Mon Sep 17 00:00:00 2001
From: mohammed karim
Date: Fri, 8 Nov 2019 18:56:21 +0000
Subject: [PATCH 3/5] capitalize pol

---
 interferogram/sentinel/create_standard_product_s1.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/interferogram/sentinel/create_standard_product_s1.py b/interferogram/sentinel/create_standard_product_s1.py
index 7fa36f2..9e58320 100644
--- a/interferogram/sentinel/create_standard_product_s1.py
+++ b/interferogram/sentinel/create_standard_product_s1.py
@@ -1688,7 +1688,7 @@ def main():
     md['sensingStart'] = sensing_start
     md['sensingStop'] = sensing_stop
     md['tags'] = ['standard_product']
-    md['polarization']= match_pol
+    md['polarization']= match_pol.upper()
     md['reference_date'] = get_date_str(ctx['slc_master_dt'])
     md['secondary_date'] = get_date_str(ctx['slc_slave_dt'])
 

From 9edb03ff56fe6e0d842982b19c7f7c3f63888599 Mon Sep 17 00:00:00 2001
From: mohammed karim
Date: Thu, 21 Nov 2019 19:27:14 +0000
Subject: [PATCH 4/5] added version checking for duplicate checking

---
 .../sentinel/create_standard_product_s1.py | 64 +++++++++++++++++--
 1 file changed, 59 insertions(+), 5 deletions(-)

diff --git a/interferogram/sentinel/create_standard_product_s1.py b/interferogram/sentinel/create_standard_product_s1.py
index 9e58320..941a0bd 100644
--- a/interferogram/sentinel/create_standard_product_s1.py
+++ b/interferogram/sentinel/create_standard_product_s1.py
@@ -113,6 +113,7 @@ def check_ifg_status(ifg_id):
     logger.info("check_slc_status : returning False")
     return False
 
+
 def get_dataset_by_hash(ifg_hash, es_index="grq"):
     """Query for existence of dataset by ID."""
 
@@ -153,6 +154,47 @@ def get_dataset_by_hash(ifg_hash, es_index="grq"):
     logger.info(result['hits']['total'])
     return result
 
+def get_dataset_by_hash_version(ifg_hash, version, es_index="grq"):
+    """Query for existence of dataset by ID."""
+
+    uu = UrlUtils()
+    es_url = uu.rest_url
+    #es_index = "{}_{}_s1-ifg".format(uu.grq_index_prefix, version)
+
+    # query
+    query = {
+        "query":{
+            "bool":{
+                "must":[
+                    { "term":{"metadata.full_id_hash.raw": ifg_hash} },
+                    { "term":{"dataset.raw": "S1-GUNW"} },
+                    { "term":{"version.raw": version} }
+                ]
+            }
+        }
+
+    }
+
+    logger.info(query)
+
+    if es_url.endswith('/'):
+        search_url = '%s%s/_search' % (es_url, es_index)
+    else:
+        search_url = '%s/%s/_search' % (es_url, es_index)
+    logger.info("search_url : %s" %search_url)
+
+    r = requests.post(search_url, data=json.dumps(query))
+    r.raise_for_status()
+
+    if r.status_code != 200:
+        logger.info("Failed to query %s:\n%s" % (es_url, r.text))
+        logger.info("query: %s" % json.dumps(query, indent=2))
+        logger.info("returned: %s" % r.text)
+        raise RuntimeError("Failed to query %s:\n%s" % (es_url, r.text))
+    result = r.json()
+    logger.info(result['hits']['total'])
+    return result
+
 def fileContainsMsg(file_name, msg):
     with open(file_name, 'r') as f:
         datafile = f.readlines()
@@ -207,6 +249,18 @@ def check_ifg_status_by_hash(new_ifg_hash):
     logger.info("check_slc_status : returning False")
     return False
 
+def check_ifg_status_by_hash_version(new_ifg_hash, version):
+    es_index="grq_*_s1-gunw"
+    result = get_dataset_by_hash_version(new_ifg_hash, version, es_index)
+    total = result['hits']['total']
+    logger.info("check_slc_status_by_hash : total : %s" %total)
+    if total>0:
+        found_id = result['hits']['hits'][0]["_id"]
+        logger.info("Duplicate dataset found: %s" %found_id)
+        sys.exit(0)
+
+    logger.info("check_slc_status : returning False")
+    return False
 
 def update_met(md):
 
@@ -928,7 +982,7 @@ def main():
         raise RuntimeError(err)
     '''
 
-    if check_ifg_status_by_hash(new_ifg_hash):
+    if check_ifg_status_by_hash_version(new_ifg_hash, get_version()):
         err = "S1-GUNW IFG Found : %s" %temp_ifg_id
         logger.info(err)
         raise RuntimeError(err)
@@ -1036,7 +1090,9 @@ def main():
     ned13_dem_url = uu.ned13_dem_url
     dem_user = uu.dem_u
     dem_pass = uu.dem_p
+
+    do_esd = False
 
     preprocess_dem_dir="preprocess_dem"
     geocode_dem_dir="geocode_dem"
@@ -1057,13 +1113,12 @@ def main():
     dem_E = int(math.ceil(dem_E))
 
     logger.info("DEM TYPE : %s" %dem_type)
-
     if dem_type.startswith("SRTM"):
         dem_type_simple = "SRTM"
         if dem_type.startswith("SRTM3"):
             dem_url = srtm3_dem_url
             dem_type_simple = "SRTM3"
-
+
         dem_cmd = [
             "{}/applications/dem.py".format(os.environ['ISCE_HOME']), "-a",
             "stitch", "-b", "{} {} {} {}".format(dem_S, dem_N, dem_W, dem_E),
@@ -1086,7 +1141,6 @@ def main():
     if dem_type == "NED13-downsampled": downsample_option = "-d 33%"
     else: downsample_option = ""
-
     dem_S = dem_S - 1 if dem_S > -89 else dem_S
     dem_N = dem_N + 1 if dem_N < 89 else dem_N
     dem_W = dem_W - 1 if dem_W > -179 else dem_W
@@ -1203,7 +1257,7 @@ def main():
     check_call(aux_cmd_line, shell=True)
 
     # create initial input xml
-    do_esd = True
+    do_esd = False
     esd_coh_th = 0.85
     xml_file = "topsApp.xml"
     create_input_xml(os.path.join(BASE_PATH, 'topsApp_standard_product.xml.tmpl'), xml_file,
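Note: the duplicate check added in patch 4 keys the GRQ/Elasticsearch lookup on both metadata.full_id_hash and the product version, so regenerating an interferogram under a new release is no longer rejected as a duplicate of the old one. A condensed sketch of that query flow follows, mirroring the patch's own requests usage; the endpoint URL, hash, and version value in the usage comment are placeholders, not values from the patch.

```python
import json
import requests

def find_gunw_by_hash_and_version(es_url, ifg_hash, version, es_index="grq_*_s1-gunw"):
    """Return the Elasticsearch hits block for S1-GUNW datasets matching
    this full_id_hash AND this dataset version."""
    query = {
        "query": {
            "bool": {
                "must": [
                    {"term": {"metadata.full_id_hash.raw": ifg_hash}},
                    {"term": {"dataset.raw": "S1-GUNW"}},
                    {"term": {"version.raw": version}},
                ]
            }
        }
    }
    search_url = "{}/{}/_search".format(es_url.rstrip("/"), es_index)
    r = requests.post(search_url, data=json.dumps(query))
    r.raise_for_status()
    return r.json()["hits"]

# Hypothetical usage mirroring check_ifg_status_by_hash_version:
# hits = find_gunw_by_hash_and_version("http://localhost:9200", new_ifg_hash, "v2.0.2")
# if hits["total"] > 0:   # an integer in the ES versions this pipeline targets
#     sys.exit(0)         # same product at the same version already exists
```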
From 62520ec090ad3727ba83fd3fafc69b8ca5714235 Mon Sep 17 00:00:00 2001
From: mohammed karim
Date: Thu, 21 Nov 2019 19:31:04 +0000
Subject: [PATCH 5/5] temp changes reversed

---
 interferogram/sentinel/create_standard_product_s1.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/interferogram/sentinel/create_standard_product_s1.py b/interferogram/sentinel/create_standard_product_s1.py
index 941a0bd..f5d332a 100644
--- a/interferogram/sentinel/create_standard_product_s1.py
+++ b/interferogram/sentinel/create_standard_product_s1.py
@@ -1092,7 +1092,6 @@ def main():
     dem_pass = uu.dem_p
 
 
-    do_esd = False
 
     preprocess_dem_dir="preprocess_dem"
     geocode_dem_dir="geocode_dem"
@@ -1257,7 +1256,7 @@ def main():
     check_call(aux_cmd_line, shell=True)
 
     # create initial input xml
-    do_esd = False
+    do_esd = True
     esd_coh_th = 0.85
     xml_file = "topsApp.xml"
    create_input_xml(os.path.join(BASE_PATH, 'topsApp_standard_product.xml.tmpl'), xml_file,