diff --git a/sdk/python/feast/cli/cli.py b/sdk/python/feast/cli/cli.py
index 1ef6ac7fbda..712e3905c3b 100644
--- a/sdk/python/feast/cli/cli.py
+++ b/sdk/python/feast/cli/cli.py
@@ -47,6 +47,7 @@
 from feast.cli.stream_feature_views import stream_feature_views_cmd
 from feast.cli.ui import ui
 from feast.cli.validation_references import validation_references_cmd
+from feast.constants import FEAST_FS_YAML_FILE_PATH_ENV_NAME
 from feast.errors import FeastProviderLoginError
 from feast.repo_config import load_repo_config
 from feast.repo_operations import (
@@ -81,7 +82,8 @@ def format_options(self, ctx: click.Context, formatter: click.HelpFormatter):
 @click.option(
     "--chdir",
     "-c",
-    help="Switch to a different feature repository directory before executing the given subcommand.",
+    envvar="FEATURE_REPO_DIR_ENV_VAR",
+    help="Switch to a different feature repository directory before executing the given subcommand. Can also be set via the FEATURE_REPO_DIR_ENV_VAR environment variable.",
 )
 @click.option(
     "--log-level",
@@ -91,7 +93,7 @@ def format_options(self, ctx: click.Context, formatter: click.HelpFormatter):
 @click.option(
     "--feature-store-yaml",
     "-f",
-    help="Override the directory where the CLI should look for the feature_store.yaml file.",
+    help=f"Override the directory where the CLI should look for the feature_store.yaml file. Can also be set via the {FEAST_FS_YAML_FILE_PATH_ENV_NAME} environment variable.",
 )
 @click.pass_context
 def cli(
diff --git a/sdk/python/tests/unit/transformation/test_spark_transformation.py b/sdk/python/tests/unit/transformation/test_spark_transformation.py
index 8ee9d22bf28..63954faef2f 100644
--- a/sdk/python/tests/unit/transformation/test_spark_transformation.py
+++ b/sdk/python/tests/unit/transformation/test_spark_transformation.py
@@ -53,7 +53,10 @@ def remove_extra_spaces_sql(df, column_name):
 @pytest.fixture
 def spark_fixture():
     spark = SparkSession.builder.appName("Testing PySpark Example").getOrCreate()
-    yield spark
+    try:
+        yield spark
+    finally:
+        spark.stop()
 
 
 @patch("feast.infra.compute_engines.spark.utils.get_or_create_new_spark_session")
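
Note on the first hunk, for reviewers unfamiliar with click's envvar mechanism: when the flag is absent on the command line, click falls back to reading the named environment variable. A minimal standalone sketch of that behavior (the option and variable names mirror the diff; the demo command itself is hypothetical, not part of the Feast CLI):

import click

@click.command()
@click.option(
    "--chdir",
    "-c",
    envvar="FEATURE_REPO_DIR_ENV_VAR",  # consulted only when -c/--chdir is not passed
    help="Feature repository directory.",
)
def demo(chdir):
    click.echo(f"repo dir: {chdir}")

if __name__ == "__main__":
    demo()

Running `FEATURE_REPO_DIR_ENV_VAR=/tmp/repo python demo.py` prints "repo dir: /tmp/repo", while an explicit `-c` on the command line still takes precedence over the variable.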
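
The test-file hunk applies the standard pytest yield-fixture teardown idiom: code after the yield runs when the test finishes, and wrapping the yield in try/finally guarantees the cleanup also runs if the generator is closed early (e.g. the yield is interrupted). A minimal sketch of the same idiom with a generic resource (names are hypothetical, not the Feast test code):

import tempfile

import pytest

@pytest.fixture
def scratch_file():
    handle = tempfile.NamedTemporaryFile(mode="w", delete=False)  # setup
    try:
        yield handle   # the test body runs while the fixture is suspended here
    finally:
        handle.close()  # teardown: runs even if the yield is interrupted

def test_scratch_file(scratch_file):
    scratch_file.write("hello")  # uses the live handle provided by the fixture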