Skip to content

Commit

Permalink
upload: pytest error formatting addressed. rucio#6475
Browse files Browse the repository at this point in the history
  • Loading branch information
panta-123 committed Jan 30, 2024
1 parent b0c599e commit bdf2a9f
Show file tree
Hide file tree
Showing 2 changed files with 14 additions and 12 deletions.
2 changes: 1 addition & 1 deletion bin/rucio
Original file line number Diff line number Diff line change
Expand Up @@ -927,7 +927,7 @@ def upload(args):
if args.dirac:
if args.lifetime:
logger.warning("Ignoring --lifetime as --dirac is set where the lifetime is taken from cfg file for dataset")

dsscope = None
dsname = None
for arg in args.args:
Expand Down
24 changes: 13 additions & 11 deletions lib/rucio/client/uploadclient.py
Original file line number Diff line number Diff line change
Expand Up @@ -149,12 +149,12 @@ def _pick_random_rse(rse_expression):
if file['did_scope']:
if file['did_scope'] != scope:
logger(logging.WARNING, 'replacing scope %s provided for the file %s and using the proper scope %s from extract_scope algorithm.\
' % (str( file['did_scope']), str(file['did_name']), str(scope)))
' % (str(file['did_scope']), str(file['did_name']), str(scope)))
if dataset_scope and dataset_name:
scope, _ = extract_scope(dataset_name, scopes=scopes)
if dataset_scope!= scope:
logger(logging.WARNING,'replacing scope %s provided for the dataset %s and using the proper scope %s from extract_scope algorithm.\
'% (str( dataset_scope), str(dataset_name), str(scope)))
if dataset_scope != scope:
logger(logging.WARNING, 'replacing scope %s provided for the dataset %s and using the proper scope %s from extract_scope algorithm.\
' % (str(dataset_scope), str(dataset_name), str(scope)))

# clear this set again to ensure that we only try to register datasets once
registered_dataset_dids = set()
Expand Down Expand Up @@ -365,7 +365,7 @@ def _add_bittorrent_meta(self, file, logger):
}
self.client.set_metadata_bulk(scope=file['did_scope'], name=file['did_name'], meta=bittorrent_meta)

def _register_file(self, file, registered_dataset_dids, ignore_availability=False, activity=None, dirac:bool = False):
def _register_file(self, file, registered_dataset_dids, ignore_availability=False, activity=None, dirac: bool = False):
"""
Registers the given file in Rucio. Creates a dataset if
needed. Registers the file DID and creates the replication
Expand Down Expand Up @@ -413,7 +413,7 @@ def _register_file(self, file, registered_dataset_dids, ignore_availability=Fals

if file.get('lifetime') is not None:
raise InputValidationError('Dataset %s exists and lifetime %s given. Prohibited to modify parent dataset lifetime.' % (dataset_did_str,
file.get('lifetime')))
file.get('lifetime')))
else:
logger(logging.DEBUG, 'Skipping dataset registration')
file_scope = file['did_scope']
Expand Down Expand Up @@ -446,7 +446,7 @@ def _register_file(self, file, registered_dataset_dids, ignore_availability=Fals
parents_metadata = None
if file.get('dataset_meta'):
parents_metadata = {file['dataset_name']: file['dataset_meta']}
self.client.add_files([replica_for_api_dirac], ignore_availability=ignore_availability, parents_metadata= parents_metadata)
self.client.add_files([replica_for_api_dirac], ignore_availability=ignore_availability, parents_metadata=parents_metadata)
self._add_bittorrent_meta(file=file, logger=logger)
logger(logging.INFO, 'Successfully added replica in Rucio catalogue at %s' % rse)
if not dirac:
Expand Down Expand Up @@ -493,6 +493,8 @@ def _collect_file_info(self, filepath, item, dirac: bool = False):
:returns: a dictionary containing all collected info and the input options
"""
logger = self.logger
logger(logging.DEBUG, 'Collecting file info')
new_item = copy.deepcopy(item)
new_item['path'] = filepath
new_item['dirname'] = os.path.dirname(filepath)
Expand All @@ -506,8 +508,8 @@ def _collect_file_info(self, filepath, item, dirac: bool = False):
if dirac:
if new_item.get('did_name') and new_item.get('dataset_name'):
# check for the dataset name compatibility with the DID name.
logger(logging.INFO, 'Checking if dataset name %s is compatible with DID name %) %s' % (new_item['dataset_name'], new_item['did_name']))
if new_item.get('did_name').rsplit('/',1)[0] == new_item.get('dataset_name'):
logger(logging.INFO, 'Checking if dataset name %s is compatible with DID name %s' % (new_item['dataset_name'], new_item['did_name']))
if new_item.get('did_name').rsplit('/', 1)[0] == new_item.get('dataset_name'):
logger(logging.INFO, 'Dataset name %s is compatible with DID name %s' % (new_item['dataset_name'], new_item['did_name']))
else:
logger(logging.ERROR, 'Dataset name %s is not compatible with file name %s' % (new_item['dataset_name'], new_item['did_name']))
Expand Down Expand Up @@ -571,7 +573,7 @@ def _collect_and_validate_file_info(self, items, dirac: bool = False):
elif not len(fnames):
logger(logging.WARNING, 'Skipping %s because it has no files in it. Subdirectories are not supported.' % dname)
elif dirac and recursive:
logger(logging.WARNING, '. Skipping %s because dirac and recursive flag combined is not supported')
logger(logging.WARNING, 'Skipping %s because dirac and recursive flag combined is not supported' % path)
elif os.path.isdir(path) and recursive:
files.extend(self._recursive(item))
elif os.path.isfile(path) and not recursive:
Expand Down Expand Up @@ -609,7 +611,7 @@ def _convert_file_for_api(self, file):
if pfn:
replica['pfn'] = pfn
return replica

def _convert_file_for_dirac(self, file: dict) -> dict:
"""
Creates a new dictionary that contains only the values
Expand Down

0 comments on commit bdf2a9f

Please sign in to comment.