@@ -65,7 +65,25 @@ def create_schema(kafka_broker, topic_name, feature_table_name):
6565 return entity , feature_table
6666
6767
68- def test_validation_with_ge (feast_client : Client , kafka_server ):
def start_job(feast_client: Client, feature_table: FeatureTable, pytestconfig):
    """Launch stream-to-online ingestion for *feature_table*, unless the test
    session runs with a scheduled streaming job.

    Returns the launched Spark job, or None when the
    ``scheduled_streaming_job`` pytest option is set (no explicit job is
    started in that mode).
    """
    if pytestconfig.getoption("scheduled_streaming_job"):
        return None

    ingestion_job = feast_client.start_stream_to_online_ingestion(feature_table)

    def _job_running():
        # wait_retry_backoff expects a (result, done) pair.
        return None, ingestion_job.get_status() == SparkJobStatus.IN_PROGRESS

    # Block until the Spark job reports IN_PROGRESS (timeout: 120s).
    wait_retry_backoff(_job_running, 120)
    return ingestion_job
77+
78+
def stop_job(job, feast_client: Client, feature_table: FeatureTable):
    """Tear down streaming ingestion after a test.

    Cancels *job* when one was explicitly started; otherwise (scheduled
    streaming-job mode, where start_job returned None) deletes the feature
    table — presumably that is what stops the scheduled ingestion; confirm
    against the scheduler's behavior.
    """
    if not job:
        feast_client.delete_feature_table(feature_table.name)
        return
    job.cancel()
84+
85+
86+ def test_validation_with_ge (feast_client : Client , kafka_server , pytestconfig ):
6987 kafka_broker = f"{ kafka_server [0 ]} :{ kafka_server [1 ]} "
7088 topic_name = f"avro-{ uuid .uuid4 ()} "
7189
@@ -82,11 +100,7 @@ def test_validation_with_ge(feast_client: Client, kafka_server):
82100 udf = create_validation_udf ("testUDF" , expectations , feature_table )
83101 apply_validation (feast_client , feature_table , udf , validation_window_secs = 1 )
84102
85- job = feast_client .start_stream_to_online_ingestion (feature_table )
86-
87- wait_retry_backoff (
88- lambda : (None , job .get_status () == SparkJobStatus .IN_PROGRESS ), 120
89- )
103+ job = start_job (feast_client , feature_table , pytestconfig )
90104
91105 wait_retry_backoff (
92106 lambda : (None , check_consumer_exist (kafka_broker , topic_name )), 120
@@ -117,7 +131,7 @@ def test_validation_with_ge(feast_client: Client, kafka_server):
117131 expected_ingested_count = test_data .shape [0 ] - len (invalid_idx ),
118132 )
119133 finally :
120- job . cancel ( )
134+ stop_job ( job , feast_client , feature_table )
121135
122136 test_data ["num" ] = test_data ["num" ].astype (np .float64 )
123137 test_data ["num" ].iloc [invalid_idx ] = np .nan
@@ -133,7 +147,7 @@ def test_validation_with_ge(feast_client: Client, kafka_server):
133147
134148@pytest .mark .env ("local" )
135149def test_validation_reports_metrics (
136- feast_client : Client , kafka_server , statsd_server : StatsDServer
150+ feast_client : Client , kafka_server , statsd_server : StatsDServer , pytestconfig
137151):
138152 kafka_broker = f"{ kafka_server [0 ]} :{ kafka_server [1 ]} "
139153 topic_name = f"avro-{ uuid .uuid4 ()} "
@@ -153,11 +167,7 @@ def test_validation_reports_metrics(
153167 udf = create_validation_udf ("testUDF" , expectations , feature_table )
154168 apply_validation (feast_client , feature_table , udf , validation_window_secs = 10 )
155169
156- job = feast_client .start_stream_to_online_ingestion (feature_table )
157-
158- wait_retry_backoff (
159- lambda : (None , job .get_status () == SparkJobStatus .IN_PROGRESS ), 120
160- )
170+ job = start_job (feast_client , feature_table , pytestconfig )
161171
162172 wait_retry_backoff (
163173 lambda : (None , check_consumer_exist (kafka_broker , topic_name )), 120
@@ -196,7 +206,7 @@ def test_validation_reports_metrics(
196206 expected_ingested_count = test_data .shape [0 ] - len (invalid_idx ),
197207 )
198208 finally :
199- job . cancel ( )
209+ stop_job ( job , feast_client , feature_table )
200210
201211 expected_metrics = [
202212 (
0 commit comments