Skip to content

Commit e1e72c9

Browse files
committed
Move typing-only imports under if TYPE_CHECKING
1 parent c936075 commit e1e72c9

8 files changed

+149
-123
lines changed

python/hsfs/connection.py

+6-2
Original file line numberDiff line numberDiff line change
@@ -17,9 +17,9 @@
1717

1818
import importlib.util
1919
import os
20-
from typing import Any, Optional
20+
from typing import TYPE_CHECKING, Any, Optional
2121

22-
from hsfs import client, engine, feature_store, usage, util
22+
from hsfs import client, engine, usage, util
2323
from hsfs.core import (
2424
feature_store_api,
2525
hosts_api,
@@ -32,6 +32,10 @@
3232
from requests.exceptions import ConnectionError
3333

3434

35+
if TYPE_CHECKING:
36+
from hsfs import feature_store
37+
38+
3539
AWS_DEFAULT_REGION = "default"
3640
HOPSWORKS_PORT_DEFAULT = 443
3741
SECRETS_STORE_DEFAULT = "parameterstore"

python/hsfs/constructor/fs_query.py

+6-2
Original file line numberDiff line numberDiff line change
@@ -15,13 +15,17 @@
1515
#
1616
from __future__ import annotations
1717

18-
from typing import Any, Dict, List, Optional, TypeVar, Union
18+
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
1919

2020
import humps
2121
from hsfs import engine
2222
from hsfs.constructor import external_feature_group_alias, hudi_feature_group_alias
2323

2424

25+
if TYPE_CHECKING:
26+
import pyspark
27+
28+
2529
class FsQuery:
2630
def __init__(
2731
self,
@@ -96,7 +100,7 @@ def hudi_cached_feature_groups(
96100
def register_external(
97101
self,
98102
spine: Optional[
99-
Union[TypeVar("pyspark.sql.DataFrame"), TypeVar("pyspark.RDD")]
103+
Union[pyspark.sql.DataFrame, pyspark.RDD]
100104
] = None,
101105
) -> None:
102106
if self._on_demand_fg_aliases is None:

python/hsfs/constructor/query.py

+21-16
Original file line numberDiff line numberDiff line change
@@ -18,20 +18,25 @@
1818
import json
1919
import warnings
2020
from datetime import date, datetime
21-
from typing import Any, Dict, List, Optional, Tuple, TypeVar, Union
21+
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
2222

2323
import humps
24-
import numpy as np
25-
import pandas as pd
26-
from hsfs import engine, storage_connector, util
24+
from hsfs import engine, util
2725
from hsfs import feature_group as fg_mod
2826
from hsfs.client.exceptions import FeatureStoreException
2927
from hsfs.constructor import join
3028
from hsfs.constructor.filter import Filter, Logic
31-
from hsfs.constructor.fs_query import FsQuery
3229
from hsfs.core import arrow_flight_client, query_constructor_api, storage_connector_api
3330
from hsfs.decorators import typechecked
34-
from hsfs.feature import Feature
31+
32+
33+
if TYPE_CHECKING:
34+
import numpy as np
35+
import pandas as pd
36+
import pyspark
37+
from hsfs import storage_connector
38+
from hsfs.constructor.fs_query import FsQuery
39+
from hsfs.feature import Feature
3540

3641

3742
@typechecked
@@ -59,7 +64,7 @@ def __init__(
5964
fg_mod.ExternalFeatureGroup,
6065
fg_mod.SpineGroup,
6166
],
62-
left_features: List[Union[str, "Feature", Dict]],
67+
left_features: List[Union[str, Feature, Dict]],
6368
feature_store_name: Optional[str] = None,
6469
feature_store_id: Optional[int] = None,
6570
left_feature_group_start_time: Optional[Union[str, int, date, datetime]] = None,
@@ -80,14 +85,14 @@ def __init__(
8085
self._query_constructor_api: "query_constructor_api.QueryConstructorApi" = (
8186
query_constructor_api.QueryConstructorApi()
8287
)
83-
self._storage_connector_api: "storage_connector_api.StorageConnectorApi" = (
88+
self._storage_connector_api: storage_connector_api.StorageConnectorApi = (
8489
storage_connector_api.StorageConnectorApi()
8590
)
8691

8792
def _prep_read(
8893
self, online: bool, read_options: Dict[str, Any]
8994
) -> Tuple[
90-
Union[str, Dict[str, Any]], Optional["storage_connector.StorageConnector"]
95+
Union[str, Dict[str, Any]], Optional[storage_connector.StorageConnector]
9196
]:
9297
self._check_read_supported(online)
9398
fs_query = self._query_constructor_api.construct_query(self)
@@ -141,8 +146,8 @@ def read(
141146
pd.DataFrame,
142147
np.ndarray,
143148
List[List[Any]],
144-
TypeVar("pyspark.sql.DataFrame"),
145-
TypeVar("pyspark.RDD"),
149+
pyspark.sql.DataFrame,
150+
pyspark.RDD,
146151
]:
147152
"""Read the specified query into a DataFrame.
148153
@@ -595,7 +600,7 @@ def to_string(self, online: bool = False, arrow_flight: bool = False) -> str:
595600
return self._to_string(fs_query, online, arrow_flight)
596601

597602
def _to_string(
598-
self, fs_query: "FsQuery", online: bool = False, asof: bool = False
603+
self, fs_query: FsQuery, online: bool = False, asof: bool = False
599604
) -> str:
600605
if online:
601606
return fs_query.query_online
@@ -633,7 +638,7 @@ def left_feature_group_end_time(
633638
) -> None:
634639
self._left_feature_group_end_time = left_feature_group_end_time
635640

636-
def append_feature(self, feature: Union[str, "Feature"]) -> "Query":
641+
def append_feature(self, feature: Union[str, Feature]) -> "Query":
637642
"""
638643
Append a feature to the query.
639644
@@ -699,7 +704,7 @@ def _get_feature_by_name(
699704
self,
700705
feature_name: str,
701706
) -> Tuple[
702-
"Feature",
707+
Feature,
703708
Optional[str],
704709
Union[
705710
fg_mod.FeatureGroup,
@@ -785,7 +790,7 @@ def filters(self) -> Optional[Logic]:
785790
return filters
786791

787792
@property
788-
def features(self) -> List["Feature"]:
793+
def features(self) -> List[Feature]:
789794
"""List of all features in the query"""
790795
features = []
791796
for feat in self._left_features:
@@ -797,7 +802,7 @@ def features(self) -> List["Feature"]:
797802

798803
return features
799804

800-
def get_feature(self, feature_name: str) -> "Feature":
805+
def get_feature(self, feature_name: str) -> Feature:
801806
"""
802807
Get a feature by name.
803808

python/hsfs/core/vector_db_client.py

+5-2
Original file line numberDiff line numberDiff line change
@@ -17,14 +17,17 @@
1717

1818
import base64
1919
from datetime import datetime
20-
from typing import Any, Dict, List, Optional, Tuple, Union
20+
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
2121

2222
import hsfs
2323
from hsfs.client.exceptions import FeatureStoreException, VectorDatabaseException
2424
from hsfs.constructor.filter import Filter, Logic
2525
from hsfs.constructor.join import Join
2626
from hsfs.core.opensearch import OpenSearchClientSingleton
27-
from hsfs.feature import Feature
27+
28+
29+
if TYPE_CHECKING:
30+
from hsfs.feature import Feature
2831

2932

3033
class VectorDbClient:

python/hsfs/feature.py

+17-14
Original file line numberDiff line numberDiff line change
@@ -17,15 +17,18 @@
1717

1818
import json
1919
from datetime import datetime
20-
from typing import Any, Dict, List, Optional, Union
20+
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
2121

22-
import hsfs
2322
import humps
2423
from hsfs import util
2524
from hsfs.constructor import filter
2625
from hsfs.decorators import typechecked
2726

2827

28+
if TYPE_CHECKING:
29+
import hsfs
30+
31+
2932
@typechecked
3033
class Feature:
3134
"""Metadata object representing a feature in a feature group in the Feature Store.
@@ -49,9 +52,9 @@ def __init__(
4952
feature_group_id: Optional[int] = None,
5053
feature_group: Optional[
5154
Union[
52-
"hsfs.feature_group.FeatureGroup",
53-
"hsfs.feature_group.ExternalFeatureGroup",
54-
"hsfs.feature_group.SpineGroup",
55+
hsfs.feature_group.FeatureGroup,
56+
hsfs.feature_group.ExternalFeatureGroup,
57+
hsfs.feature_group.SpineGroup,
5558
]
5659
] = None,
5760
**kwargs,
@@ -216,41 +219,41 @@ def _get_filter_value(self, value: Any) -> Any:
216219
else:
217220
return value
218221

219-
def __lt__(self, other: Any) -> "filter.Filter":
222+
def __lt__(self, other: Any) -> filter.Filter:
220223
return filter.Filter(self, filter.Filter.LT,
221224
self._get_filter_value(other))
222225

223-
def __le__(self, other: Any) -> "filter.Filter":
226+
def __le__(self, other: Any) -> filter.Filter:
224227
return filter.Filter(self, filter.Filter.LE,
225228
self._get_filter_value(other))
226229

227-
def __eq__(self, other: Any) -> "filter.Filter":
230+
def __eq__(self, other: Any) -> filter.Filter:
228231
return filter.Filter(self, filter.Filter.EQ,
229232
self._get_filter_value(other))
230233

231-
def __ne__(self, other: Any) -> "filter.Filter":
234+
def __ne__(self, other: Any) -> filter.Filter:
232235
return filter.Filter(self, filter.Filter.NE,
233236
self._get_filter_value(other))
234237

235-
def __ge__(self, other: Any) -> "filter.Filter":
238+
def __ge__(self, other: Any) -> filter.Filter:
236239
return filter.Filter(self, filter.Filter.GE,
237240
self._get_filter_value(other))
238241

239-
def __gt__(self, other: Any) -> "filter.Filter":
242+
def __gt__(self, other: Any) -> filter.Filter:
240243
return filter.Filter(self, filter.Filter.GT,
241244
self._get_filter_value(other))
242245

243-
def contains(self, other: Union[str, List[Any]]) -> "filter.Filter":
246+
def contains(self, other: Union[str, List[Any]]) -> filter.Filter:
244247
"""
245248
!!! warning "Deprecated"
246249
`contains` method is deprecated. Use `isin` instead.
247250
"""
248251
return self.isin(other)
249252

250-
def isin(self, other: Union[str, List[Any]]) -> "filter.Filter":
253+
def isin(self, other: Union[str, List[Any]]) -> filter.Filter:
251254
return filter.Filter(self, filter.Filter.IN, json.dumps(other))
252255

253-
def like(self, other: Any) -> "filter.Filter":
256+
def like(self, other: Any) -> filter.Filter:
254257
return filter.Filter(self, filter.Filter.LK, other)
255258

256259
def __str__(self) -> str:

0 commit comments

Comments (0)