(fix) lint error import pyspark DataFrame
ElliotNguyen68 committed Mar 6, 2024
1 parent 388413c commit 308c175
Showing 1 changed file with 5 additions and 5 deletions.
@@ -13,7 +13,7 @@
 import pyspark
 from pydantic import StrictStr
 from pyspark import SparkConf
-from pyspark.sql import SparkSession, DataFrame as SparkDataFrame
+from pyspark.sql import SparkSession
 from pytz import utc

 from feast import FeatureView, OnDemandFeatureView
@@ -125,7 +125,7 @@ def get_historical_features(
     config: RepoConfig,
     feature_views: List[FeatureView],
     feature_refs: List[str],
-    entity_df: Union[pandas.DataFrame, str, SparkDataFrame],
+    entity_df: Union[pandas.DataFrame, str, pyspark.sql.DataFrame],
     registry: Registry,
     project: str,
     full_feature_names: bool = False,
@@ -473,7 +473,7 @@ def _get_entity_df_event_timestamp_range(
             entity_df_event_timestamp.min().to_pydatetime(),
             entity_df_event_timestamp.max().to_pydatetime(),
         )
-    elif isinstance(entity_df, str) or isinstance(entity_df, SparkDataFrame):
+    elif isinstance(entity_df, str) or isinstance(entity_df, pyspark.sql.DataFrame):
         # If the entity_df is a string (SQL query), determine range
         # from table
         if isinstance(entity_df, str):
@@ -501,7 +501,7 @@ def _get_entity_schema(
 ) -> Dict[str, np.dtype]:
     if isinstance(entity_df, pd.DataFrame):
         return dict(zip(entity_df.columns, entity_df.dtypes))
-    elif isinstance(entity_df, str) or isinstance(entity_df,SparkDataFrame):
+    elif isinstance(entity_df, str) or isinstance(entity_df,pyspark.sql.DataFrame):
         if isinstance(entity_df, str):
             entity_spark_df = spark_session.sql(entity_df)
         else:
@@ -530,7 +530,7 @@ def _upload_entity_df(
         return
     elif isinstance(entity_df, str):
         spark_session.sql(entity_df).createOrReplaceTempView(table_name)
-    elif isinstance(entity_df, SparkDataFrame):
+    elif isinstance(entity_df, pyspark.sql.DataFrame):
         entity_df.createOrReplaceTempView(table_name)
         return
     else:
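For context, a minimal sketch (not part of the commit) of why the fully qualified reference still resolves after the change: the module keeps `import pyspark`, and `from pyspark.sql import SparkSession` loads the `pyspark.sql` submodule, so `pyspark.sql.DataFrame` is reachable without the `DataFrame as SparkDataFrame` alias that the linter presumably flagged. The session and DataFrame names below are illustrative, not from the commit.

import pyspark
from pyspark.sql import SparkSession  # importing from pyspark.sql also binds pyspark.sql on the package

# Illustrative local session and DataFrame (assumed names, for demonstration only)
spark = SparkSession.builder.master("local[1]").appName("sketch").getOrCreate()
df = spark.createDataFrame([(1, "a")], ["id", "value"])

# The same style of isinstance check the diff switches to:
assert isinstance(df, pyspark.sql.DataFrame)
spark.stop()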
