
Commit 343acf9

temp skip tests
1 parent 7d41884 commit 343acf9

File tree

1 file changed: +32 -0 lines changed

python/pyspark/sql/tests/pandas/test_pandas_transform_with_state.py

Lines changed: 32 additions & 0 deletions
@@ -194,6 +194,7 @@ def _test_transform_with_state_basic(
         q.awaitTermination(10)
         self.assertTrue(q.exception() is None)
 
+    @unittest.skip("Temporarily disabled for testing")
     def test_transform_with_state_basic(self):
         def check_results(batch_df, batch_id):
             batch_df.collect()
@@ -210,6 +211,7 @@ def check_results(batch_df, batch_id):
 
         self._test_transform_with_state_basic(SimpleStatefulProcessorFactory(), check_results)
 
+    @unittest.skip("Temporarily disabled for testing")
     def test_transform_with_state_non_exist_value_state(self):
         def check_results(batch_df, _):
             batch_df.collect()
@@ -222,6 +224,7 @@ def check_results(batch_df, _):
             InvalidSimpleStatefulProcessorFactory(), check_results, True
         )
 
+    @unittest.skip("Temporarily disabled for testing")
     def test_transform_with_state_query_restarts(self):
         root_path = tempfile.mkdtemp()
         input_path = root_path + "/input"
@@ -296,6 +299,7 @@ def test_transform_with_state_query_restarts(self):
             Row(id="1", countAsString="2"),
         }
 
+    @unittest.skip("Temporarily disabled for testing")
     def test_transform_with_state_list_state(self):
         def check_results(batch_df, _):
             batch_df.collect()
@@ -308,6 +312,7 @@ def check_results(batch_df, _):
             ListStateProcessorFactory(), check_results, True, "processingTime"
         )
 
+    @unittest.skip("Temporarily disabled for testing")
     def test_transform_with_state_list_state_large_list(self):
         def check_results(batch_df, batch_id):
             batch_df.collect()
@@ -383,6 +388,7 @@ def check_results(batch_df, batch_id):
         self.assertTrue(q.exception() is None)
 
     # test list state with ttl has the same behavior as list state when state doesn't expire.
+    @unittest.skip("Temporarily disabled for testing")
     def test_transform_with_state_list_state_large_ttl(self):
         def check_results(batch_df, batch_id):
             batch_df.collect()
@@ -395,6 +401,7 @@ def check_results(batch_df, batch_id):
             ListStateLargeTTLProcessorFactory(), check_results, True, "processingTime"
         )
 
+    @unittest.skip("Temporarily disabled for testing")
     def test_transform_with_state_map_state(self):
         def check_results(batch_df, _):
             batch_df.collect()
@@ -406,6 +413,7 @@ def check_results(batch_df, _):
         self._test_transform_with_state_basic(MapStateProcessorFactory(), check_results, True)
 
     # test map state with ttl has the same behavior as map state when state doesn't expire.
+    @unittest.skip("Temporarily disabled for testing")
     def test_transform_with_state_map_state_large_ttl(self):
         def check_results(batch_df, batch_id):
             batch_df.collect()
@@ -420,6 +428,7 @@ def check_results(batch_df, batch_id):
 
     # test value state with ttl has the same behavior as value state when
     # state doesn't expire.
+    @unittest.skip("Temporarily disabled for testing")
     def test_value_state_ttl_basic(self):
         def check_results(batch_df, batch_id):
             batch_df.collect()
@@ -440,6 +449,7 @@ def check_results(batch_df, batch_id):
 
     # TODO SPARK-50908 holistic fix for TTL suite
     @unittest.skip("test is flaky and it is only a timing issue, skipping until we can resolve")
+    @unittest.skip("Temporarily disabled for testing")
     def test_value_state_ttl_expiration(self):
         def check_results(batch_df, batch_id):
             batch_df.collect()
@@ -589,6 +599,7 @@ def _test_transform_with_state_proc_timer(self, stateful_processor_factory, chec
         q.awaitTermination(10)
         self.assertTrue(q.exception() is None)
 
+    @unittest.skip("Temporarily disabled for testing")
     def test_transform_with_state_proc_timer(self):
         def check_results(batch_df, batch_id):
             batch_df.collect()
@@ -706,6 +717,7 @@ def prepare_batch3(input_path):
         q.awaitTermination(10)
         self.assertTrue(q.exception() is None)
 
+    @unittest.skip("Temporarily disabled for testing")
     def test_transform_with_state_event_time(self):
         def check_results(batch_df, batch_id):
             batch_df.collect()
@@ -738,6 +750,7 @@ def check_results(batch_df, batch_id):
             EventTimeStatefulProcessorFactory(), check_results
         )
 
+    @unittest.skip("Temporarily disabled for testing")
     def test_transform_with_state_with_wmark_and_non_event_time(self):
         def check_results(batch_df, batch_id):
             batch_df.collect()
@@ -836,6 +849,7 @@ def _test_transform_with_state_init_state(
         q.awaitTermination(10)
         self.assertTrue(q.exception() is None)
 
+    @unittest.skip("Temporarily disabled for testing")
     def test_transform_with_state_init_state(self):
         def check_results(batch_df, batch_id):
             batch_df.collect()
@@ -860,6 +874,7 @@ def check_results(batch_df, batch_id):
             SimpleStatefulProcessorWithInitialStateFactory(), check_results
        )
 
+    @unittest.skip("Temporarily disabled for testing")
     def test_transform_with_state_init_state_with_extra_transformation(self):
         def check_results(batch_df, batch_id):
             batch_df.collect()
@@ -939,6 +954,7 @@ def _test_transform_with_state_non_contiguous_grouping_cols(
         q.awaitTermination(10)
         self.assertTrue(q.exception() is None)
 
+    @unittest.skip("Temporarily disabled for testing")
     def test_transform_with_state_non_contiguous_grouping_cols(self):
         def check_results(batch_df, batch_id):
             batch_df.collect()
@@ -951,6 +967,7 @@ def check_results(batch_df, batch_id):
             SimpleStatefulProcessorWithInitialStateFactory(), check_results
         )
 
+    @unittest.skip("Temporarily disabled for testing")
     def test_transform_with_state_non_contiguous_grouping_cols_with_init_state(self):
         def check_results(batch_df, batch_id):
             batch_df.collect()
@@ -1034,6 +1051,7 @@ def _test_transform_with_state_chaining_ops(
         q.processAllAvailable()
         q.awaitTermination(10)
 
+    @unittest.skip("Temporarily disabled for testing")
     def test_transform_with_state_chaining_ops(self):
         def check_results(batch_df, batch_id):
             batch_df.collect()
@@ -1070,6 +1088,7 @@ def check_results(batch_df, batch_id):
             ["outputTimestamp", "id"],
         )
 
+    @unittest.skip("Temporarily disabled for testing")
     def test_transform_with_state_init_state_with_timers(self):
         def check_results(batch_df, batch_id):
             batch_df.collect()
@@ -1100,6 +1119,7 @@ def check_results(batch_df, batch_id):
             StatefulProcessorWithInitialStateTimersFactory(), check_results, "processingTime"
         )
 
+    @unittest.skip("Temporarily disabled for testing")
     def test_transform_with_state_batch_query(self):
         data = [("0", 123), ("0", 46), ("1", 146), ("1", 346)]
         df = self.spark.createDataFrame(data, "id string, temperature int")
@@ -1131,6 +1151,7 @@ def test_transform_with_state_batch_query(self):
             Row(id="1", countAsString="2"),
         }
 
+    @unittest.skip("Temporarily disabled for testing")
     def test_transform_with_state_batch_query_initial_state(self):
         data = [("0", 123), ("0", 46), ("1", 146), ("1", 346)]
         df = self.spark.createDataFrame(data, "id string, temperature int")
@@ -1175,9 +1196,11 @@ def test_transform_with_state_batch_query_initial_state(self):
     @unittest.skipIf(
         "COVERAGE_PROCESS_START" in os.environ, "Flaky with coverage enabled, skipping for now."
     )
+    @unittest.skip("Temporarily disabled for testing")
     def test_transform_with_map_state_metadata(self):
         self._test_transform_with_map_state_metadata(None)
 
+    @unittest.skip("Temporarily disabled for testing")
     def test_transform_with_map_state_metadata_with_init_state(self):
         # run the same test suite again but with no-op initial state
         # TWS with initial state is using a different python runner
@@ -1310,6 +1333,7 @@ def check_results(batch_df, batch_id):
         )
 
     # This test covers multiple list state variables and flatten option
+    @unittest.skip("Temporarily disabled for testing")
     def test_transform_with_list_state_metadata(self):
         checkpoint_path = tempfile.mktemp()
 
@@ -1390,6 +1414,7 @@ def check_results(batch_df, batch_id):
 
     # This test covers value state variable and read change feed,
     # snapshotStartBatchId related options
+    @unittest.skip("Temporarily disabled for testing")
     def test_transform_with_value_state_metadata(self):
         checkpoint_path = tempfile.mktemp()
 
@@ -1480,6 +1505,7 @@ def check_results(batch_df, batch_id):
             checkpoint_path=checkpoint_path,
         )
 
+    @unittest.skip("Temporarily disabled for testing")
     def test_transform_with_state_restart_with_multiple_rows_init_state(self):
         def check_results(batch_df, _):
             batch_df.collect()
@@ -1539,6 +1565,7 @@ def dataframe_to_value_list(output_df):
             initial_state=init_df,
         )
 
+    @unittest.skip("Temporarily disabled for testing")
     def test_transform_with_state_in_pandas_composite_type(self):
         def check_results(batch_df, batch_id):
             if batch_id == 0:
@@ -1597,6 +1624,7 @@ def check_results(batch_df, batch_id):
         )
 
     # run the same test suites again but with single shuffle partition
+    @unittest.skip("Temporarily disabled for testing")
     def test_transform_with_state_with_timers_single_partition(self):
         with self.sql_conf({"spark.sql.shuffle.partitions": "1"}):
             self.test_transform_with_state_init_state_with_timers()
@@ -1645,6 +1673,7 @@ def _run_evolution_test(self, processor_factory, checkpoint_dir, check_results,
         q.processAllAvailable()
         q.awaitTermination(10)
 
+    @unittest.skip("Temporarily disabled for testing")
     def test_schema_evolution_scenarios(self):
         """Test various schema evolution scenarios"""
         with self.sql_conf({"spark.sql.streaming.stateStore.encodingFormat": "avro"}):
@@ -1713,6 +1742,7 @@ def check_upcast(batch_df, batch_id):
 
     # This test case verifies that an exception is thrown when downcasting, which violates
    # Avro's schema evolution rules
+    @unittest.skip("Temporarily disabled for testing")
     def test_schema_evolution_fails(self):
         with self.sql_conf({"spark.sql.streaming.stateStore.encodingFormat": "avro"}):
             with tempfile.TemporaryDirectory() as checkpoint_dir:
@@ -1765,6 +1795,7 @@ def check_basic_state(batch_df, batch_id):
                     and "Schema evolution is not possible" in error_msg
                 )
 
+    @unittest.skip("Temporarily disabled for testing")
     def test_not_nullable_fails(self):
         with self.sql_conf({"spark.sql.streaming.stateStore.encodingFormat": "avro"}):
             with tempfile.TemporaryDirectory() as checkpoint_dir:
@@ -1797,6 +1828,7 @@ def check_basic_state(batch_df, batch_id):
                     and "column family state must be nullable" in error_msg
                 )
 
+    @unittest.skip("Temporarily disabled for testing")
     def test_transform_with_state_int_to_decimal_coercion(self):
         if not self.use_pandas():
             return
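Every addition in this commit is the same one-line change: the standard-library decorator @unittest.skip("Temporarily disabled for testing") placed directly above a test method. As a minimal, self-contained sketch of what that decorator does (the class and test names below are illustrative, not taken from the PySpark suite):

# Illustrative example only; ExampleSuite and its tests are hypothetical.
import unittest


class ExampleSuite(unittest.TestCase):
    @unittest.skip("Temporarily disabled for testing")
    def test_disabled(self):
        # The body never runs; the runner reports the test as skipped
        # along with the reason string passed to the decorator.
        self.fail("unreachable while the skip decorator is applied")

    def test_still_runs(self):
        # Undecorated tests are unaffected by the skips added here.
        self.assertEqual(1 + 1, 2)


if __name__ == "__main__":
    unittest.main(verbosity=2)

For conditional skips, unittest.skipIf and unittest.skipUnless take a condition plus a reason, which is the pattern already used above for the COVERAGE_PROCESS_START check.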

0 commit comments