@@ -323,6 +323,18 @@ def offline_fg_materialization(spark: SparkSession, job_conf: Dict[Any, Any], in
     offset_df = spark.createDataFrame([offset_dict])
     offset_df.coalesce(1).write.mode("overwrite").json(offset_location)
 
+def update_table_schema_fg(spark: SparkSession, job_conf: Dict[Any, Any]) -> None:
+    """
+    Run table schema update job on a feature group.
+    """
+    feature_store = job_conf.pop("feature_store")
+    fs = get_feature_store_handle(feature_store)
+
+    entity = fs.get_feature_group(name=job_conf["name"], version=job_conf["version"])
+
+    entity.stream = False
+    engine.get_instance().update_table_schema(entity)
+
 
 def _build_starting_offsets(initial_check_point_string: str):
     if not initial_check_point_string:
         return ""
@@ -358,6 +370,7 @@ def _build_starting_offsets(initial_check_point_string: str):
             "run_feature_monitoring",
             "delta_vacuum_fg",
             "offline_fg_materialization",
+            "update_table_schema_fg",
         ],
         help="Operation type",
     )
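
A reduced sketch of the argparse wiring this choice plugs into; only the choices list and the help text are confirmed by the diff, while the `-op` flag name is inferred from the dispatch on `args.op` below and should be treated as an assumption.

```python
# Reduced argparse sketch; the "-op" flag name is an assumption inferred
# from the dispatch on args.op, only the choices and help text appear here.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument(
    "-op",
    type=str,
    choices=[
        "offline_fg_materialization",
        "update_table_schema_fg",  # choice added by this diff
    ],
    help="Operation type",
)

args = parser.parse_args(["-op", "update_table_schema_fg"])
assert args.op == "update_table_schema_fg"
```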
@@ -406,6 +419,8 @@ def parse_isoformat_date(da: str) -> datetime:
             delta_vacuum_fg(spark, job_conf)
         elif args.op == "offline_fg_materialization":
             offline_fg_materialization(spark, job_conf, args.initialCheckPointString)
+        elif args.op == "update_table_schema_fg":
+            update_table_schema_fg(spark, job_conf)
 
         success = True
     except Exception:
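
The new branch slots into the existing dispatch, so a failed schema update surfaces through the same `success`/`except` bookkeeping as the other operations. The sketch below reconstructs that surrounding control flow from the context lines; everything outside the added `elif` branch is an assumption about the rest of the file.

```python
# Reconstructed control flow (an assumption based on the context lines;
# only the update_table_schema_fg branch is confirmed by this diff).
success = False
try:
    if args.op == "offline_fg_materialization":
        offline_fg_materialization(spark, job_conf, args.initialCheckPointString)
    elif args.op == "update_table_schema_fg":
        update_table_schema_fg(spark, job_conf)
    success = True  # only reached if the selected operation returns cleanly
except Exception:
    raise
```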