@@ -55,7 +55,7 @@ def register_temporary_table(self, delta_fg_alias, read_options):
        delta_options = self._setup_delta_read_opts(delta_fg_alias, read_options)
        self._spark_session.read.format(self.DELTA_SPARK_FORMAT).options(
            **delta_options
-        ).load(self._feature_group.uri).createOrReplaceTempView(
+        ).load(self._feature_group.get_uri()).createOrReplaceTempView(
            delta_fg_alias.alias
        )

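For reference, here is a minimal standalone sketch of the pattern this hunk touches: loading a Delta table from a storage URI with per-read options and exposing it to Spark SQL as a temporary view. The path, view name, and the `timestampAsOf` option are placeholders rather than values hsfs actually produces, and the literal `"delta"` stands in for the `DELTA_SPARK_FORMAT` constant.

```python
# Sketch of the temp-view registration pattern above.
# Assumes a Spark session configured with the Delta Lake extensions.
from pyspark.sql import SparkSession

spark = SparkSession.builder.getOrCreate()

delta_uri = "s3a://bucket/feature_store/fg_1"   # stand-in for feature_group.get_uri()
read_options = {"timestampAsOf": "2024-01-01"}  # e.g. a time-travel read option

(
    spark.read.format("delta")
    .options(**read_options)
    .load(delta_uri)
    .createOrReplaceTempView("fg_1_alias")
)

spark.sql("SELECT COUNT(*) FROM fg_1_alias").show()
```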
@@ -86,14 +86,14 @@ def _setup_delta_read_opts(self, delta_fg_alias, read_options):

    def delete_record(self, delete_df):
        if not DeltaTable.isDeltaTable(
-            self._spark_session, self._feature_group.uri
+            self._spark_session, self._feature_group.get_uri()
        ):
            raise FeatureStoreException(
                f"There is no data available in Feature group {self._feature_group.name}, or it is not DELTA enabled"
            )
        else:
            fg_source_table = DeltaTable.forPath(
-                self._spark_session, self._feature_group.uri
+                self._spark_session, self._feature_group.get_uri()
            )

            source_alias = (
@@ -109,7 +109,7 @@ def delete_record(self, delete_df):
            ).whenMatchedDelete().execute()

        fg_commit = self._get_last_commit_metadata(
-            self._spark_session, self._feature_group.uri
+            self._spark_session, self._feature_group.get_uri()
        )
        return self._feature_group_api.commit(self._feature_group, fg_commit)

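The delete flow in the two hunks above is the standard delta-spark guard-then-merge pattern: verify the path is a Delta table, open it with `DeltaTable.forPath`, and delete matching rows through a merge. A self-contained sketch, assuming a placeholder path, alias names, and a simple `id`-based merge condition (the real condition comes from `_generate_merge_query`):

```python
# Sketch of a guarded merge-delete against a Delta path.
# Path, aliases, and merge condition are placeholders.
from delta.tables import DeltaTable
from pyspark.sql import SparkSession

spark = SparkSession.builder.getOrCreate()
delta_uri = "s3a://bucket/feature_store/fg_1"  # stand-in for feature_group.get_uri()

if not DeltaTable.isDeltaTable(spark, delta_uri):
    raise ValueError(f"No Delta table found at {delta_uri}")

delete_df = spark.createDataFrame([(1,), (2,)], ["id"])  # keys of the rows to delete

(
    DeltaTable.forPath(spark, delta_uri)
    .alias("source")
    .merge(delete_df.alias("updates"), "source.id = updates.id")
    .whenMatchedDelete()
    .execute()
)
```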
@@ -118,7 +118,7 @@ def _write_delta_dataset(self, dataset, write_options):
            write_options = {}

        if not DeltaTable.isDeltaTable(
-            self._spark_session, self._feature_group.uri
+            self._spark_session, self._feature_group.get_uri()
        ):
            (
                dataset.write.format(DeltaEngine.DELTA_SPARK_FORMAT)
@@ -129,11 +129,11 @@ def _write_delta_dataset(self, dataset, write_options):
                    else []
                )
                .mode("append")
-                .save(self._feature_group.uri)
+                .save(self._feature_group.get_uri())
            )
        else:
            fg_source_table = DeltaTable.forPath(
-                self._spark_session, self._feature_group.uri
+                self._spark_session, self._feature_group.get_uri()
            )

            source_alias = (
@@ -149,7 +149,7 @@ def _write_delta_dataset(self, dataset, write_options):
            ).whenMatchedUpdateAll().whenNotMatchedInsertAll().execute()

        return self._get_last_commit_metadata(
-            self._spark_session, self._feature_group.uri
+            self._spark_session, self._feature_group.get_uri()
        )

    def _generate_merge_query(self, source_alias, updates_alias):
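The write path above appends to the target path when no Delta table exists yet and otherwise upserts through a merge. Below is a hedged sketch of both branches; the path, key column, and empty partition list are placeholders, and the trailing `history(1)` call is only an assumption about the kind of commit metadata `_get_last_commit_metadata` returns (its implementation is not part of this diff).

```python
# Sketch of the write path: initial append when no Delta table exists yet,
# otherwise an upsert via merge. Path, keys, and partition columns are placeholders.
from delta.tables import DeltaTable
from pyspark.sql import SparkSession

spark = SparkSession.builder.getOrCreate()
delta_uri = "s3a://bucket/feature_store/fg_1"  # stand-in for feature_group.get_uri()
dataset = spark.createDataFrame([(1, "a"), (2, "b")], ["id", "value"])

if not DeltaTable.isDeltaTable(spark, delta_uri):
    # First write: create the Delta table by appending to the target path.
    dataset.write.format("delta").partitionBy([]).mode("append").save(delta_uri)
else:
    # Subsequent writes: update matching rows, insert the rest.
    (
        DeltaTable.forPath(spark, delta_uri)
        .alias("source")
        .merge(dataset.alias("updates"), "source.id = updates.id")
        .whenMatchedUpdateAll()
        .whenNotMatchedInsertAll()
        .execute()
    )

# One way to inspect the most recent commit; presumably the kind of information
# _get_last_commit_metadata summarizes, though that helper is not shown here.
last_commit = DeltaTable.forPath(spark, delta_uri).history(1).collect()[0]
print(last_commit["version"], last_commit["operation"])
```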