From bdf2a9f07913a21acfce4fda78d222b611bb592a Mon Sep 17 00:00:00 2001 From: Anil Panta Date: Tue, 30 Jan 2024 16:44:14 -0500 Subject: [PATCH] upload: pytest error formatting address. #6475 --- bin/rucio | 2 +- lib/rucio/client/uploadclient.py | 24 +++++++++++++----------- 2 files changed, 14 insertions(+), 12 deletions(-) diff --git a/bin/rucio b/bin/rucio index 3ec120bcc6f..04f05280865 100755 --- a/bin/rucio +++ b/bin/rucio @@ -927,7 +927,7 @@ def upload(args): if args.dirac: if args.lifetime: logger.warning("Ignoring --lifetime as --dirac is set where the lifetime is taken from cfg file for dataset") - + dsscope = None dsname = None for arg in args.args: diff --git a/lib/rucio/client/uploadclient.py b/lib/rucio/client/uploadclient.py index cf31ef43512..b7a50086a87 100644 --- a/lib/rucio/client/uploadclient.py +++ b/lib/rucio/client/uploadclient.py @@ -149,12 +149,12 @@ def _pick_random_rse(rse_expression): if file['did_scope']: if file['did_scope'] != scope: logger(logging.WARNING, 'replacing scope %s provided for the file %s and using the proper scope %s from extract_scope algorithm.\ - ' % (str( file['did_scope']), str(file['did_name']), str(scope))) + ' % (str(file['did_scope']), str(file['did_name']), str(scope))) if dataset_scope and dataset_name: scope, _ = extract_scope(dataset_name, scopes=scopes) - if dataset_scope!= scope: - logger(logging.WARNING,'replacing scope %s provided for the dataset %s and using the proper scope %s from extract_scope algorithm.\ - '% (str( dataset_scope), str(dataset_name), str(scope))) + if dataset_scope != scope: + logger(logging.WARNING, 'replacing scope %s provided for the dataset %s and using the proper scope %s from extract_scope algorithm.\ + ' % (str(dataset_scope), str(dataset_name), str(scope))) # clear this set again to ensure that we only try to register datasets once registered_dataset_dids = set() @@ -365,7 +365,7 @@ def _add_bittorrent_meta(self, file, logger): } 
self.client.set_metadata_bulk(scope=file['did_scope'], name=file['did_name'], meta=bittorrent_meta) - def _register_file(self, file, registered_dataset_dids, ignore_availability=False, activity=None, dirac:bool = False): + def _register_file(self, file, registered_dataset_dids, ignore_availability=False, activity=None, dirac: bool = False): """ Registers the given file in Rucio. Creates a dataset if needed. Registers the file DID and creates the replication @@ -413,7 +413,7 @@ def _register_file(self, file, registered_dataset_dids, ignore_availability=Fals if file.get('lifetime') is not None: raise InputValidationError('Dataset %s exists and lifetime %s given. Prohibited to modify parent dataset lifetime.' % (dataset_did_str, - file.get('lifetime'))) + file.get('lifetime'))) else: logger(logging.DEBUG, 'Skipping dataset registration') file_scope = file['did_scope'] @@ -446,7 +446,7 @@ def _register_file(self, file, registered_dataset_dids, ignore_availability=Fals parents_metadata = None if file.get('dataset_meta'): parents_metadata = {file['dataset_name']: file['dataset_meta']} - self.client.add_files([replica_for_api_dirac], ignore_availability=ignore_availability, parents_metadata= parents_metadata) + self.client.add_files([replica_for_api_dirac], ignore_availability=ignore_availability, parents_metadata=parents_metadata) self._add_bittorrent_meta(file=file, logger=logger) logger(logging.INFO, 'Successfully added replica in Rucio catalogue at %s' % rse) if not dirac: @@ -493,6 +493,8 @@ def _collect_file_info(self, filepath, item, dirac: bool = False): :returns: a dictionary containing all collected info and the input options """ + logger = self.logger + logger(logging.DEBUG, 'Collecting file info') new_item = copy.deepcopy(item) new_item['path'] = filepath new_item['dirname'] = os.path.dirname(filepath) @@ -506,8 +508,8 @@ def _collect_file_info(self, filepath, item, dirac: bool = False): if dirac: if new_item.get('did_name') and new_item.get('dataset_name'): # 
check for the dataset name compatibility with the DID name. - logger(logging.INFO, 'Checking if dataset name %s is compatible with DID name %) %s' % (new_item['dataset_name'], new_item['did_name'])) - if new_item.get('did_name').rsplit('/',1)[0] == new_item.get('dataset_name'): + logger(logging.INFO, 'Checking if dataset name %s is compatible with DID name %s' % (new_item['dataset_name'], new_item['did_name'])) + if new_item.get('did_name').rsplit('/', 1)[0] == new_item.get('dataset_name'): logger(logging.INFO, 'Dataset name %s is compatible with DID name %s' % (new_item['dataset_name'], new_item['did_name'])) else: logger(logging.ERROR, 'Dataset name %s is not compatible with file name %s' % (new_item['dataset_name'], new_item['did_name'])) @@ -571,7 +573,7 @@ def _collect_and_validate_file_info(self, items, dirac: bool = False): elif not len(fnames): logger(logging.WARNING, 'Skipping %s because it has no files in it. Subdirectories are not supported.' % dname) elif dirac and recursive: - logger(logging.WARNING, '. Skipping %s because dirac and recursive flag combined is not supported') + logger(logging.WARNING, 'Skipping %s because dirac and recursive flag combined is not supported' % path) elif os.path.isdir(path) and recursive: files.extend(self._recursive(item)) elif os.path.isfile(path) and not recursive: @@ -609,7 +611,7 @@ def _convert_file_for_api(self, file): if pfn: replica['pfn'] = pfn return replica - + def _convert_file_for_dirac(self, file: dict) -> dict: """ Creates a new dictionary that contains only the values