@@ -420,25 +420,26 @@ def _scan_and_create_links(self, link_class):
             # w/o Dataflow
             # pubsub.push(topic, key)
             # -> GF.pull(topic, key) + env -> add_task()
-            logger.debug(f'Scanning key {key}')
+            logger.debug('Scanning key %s', key)
             self.info_update_progress(last_sync_count=tasks_created, tasks_existed=tasks_existed)

             # skip if task already exists
             if link_class.exists(key, self):
-                logger.debug(f'{self.__class__.__name__} link {key} already exists')
+                logger.debug('%s link %s already exists', self.__class__.__name__, key)
                 tasks_existed += 1  # update progress counter
                 continue

-            logger.debug(f'{self}: found new key {key}')
+            logger.debug('%s: found new key %s', self.__class__.__name__, key)
             try:
                 tasks_data = self.get_data(key)
             except (UnicodeDecodeError, json.decoder.JSONDecodeError) as exc:
                 logger.debug(exc, exc_info=True)
-                raise ValueError(
+                logger.warning(
                     f'Error loading JSON from file "{key}".\nIf you\'re trying to import non-JSON data '
                     f'(images, audio, text, etc.), edit storage settings and enable '
                     f'"Treat every bucket object as a source file"'
                 )
+                continue

             if not flag_set('fflag_feat_dia_2092_multitasks_per_storage_link'):
                 tasks_data = tasks_data[:1]
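Beyond style, the f-string to %-style change makes the debug calls lazy: the logging module only interpolates the arguments when a record is actually going to be emitted, whereas an f-string is rendered before logger.debug() is even entered. A minimal standalone sketch of the difference (the key value is a hypothetical example):

import logging

logging.basicConfig(level=logging.INFO)  # DEBUG records are filtered out here
logger = logging.getLogger(__name__)

key = 'bucket/objects/0001.json'  # hypothetical key, for illustration only

# Eager: the f-string is built before logging can decide to drop the record.
logger.debug(f'Scanning key {key}')

# Lazy: the %s interpolation only happens if the DEBUG record is emitted,
# so suppressed debug calls avoid the formatting cost entirely.
logger.debug('Scanning key %s', key)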
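The raise ValueError(...) to logger.warning(...) plus continue change turns a malformed object from a sync-aborting error into a per-key skip: the exception is still recorded at DEBUG level with its traceback, a warning names the offending key, and the scan moves on. A minimal sketch of the pattern, with load_object as a hypothetical stand-in for self.get_data:

import json
import logging

logger = logging.getLogger(__name__)

def scan_keys(keys, load_object):
    # load_object is a hypothetical stand-in for self.get_data(key)
    for key in keys:
        try:
            tasks_data = load_object(key)
        except (UnicodeDecodeError, json.decoder.JSONDecodeError) as exc:
            # Keep the full traceback at DEBUG, warn at a visible level,
            # and skip this key instead of aborting the whole scan.
            logger.debug(exc, exc_info=True)
            logger.warning('Error loading JSON from file "%s", skipping it', key)
            continue
        yield key, tasks_data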