Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Don't distinguish between types of ArithmeticException for Spark 3.2.x #5483

Merged
Merged
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 8 additions & 2 deletions integration_tests/src/main/python/arithmetic_ops_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
from marks import ignore_order, incompat, approximate_float, allow_non_gpu
from pyspark.sql.types import *
from pyspark.sql.types import IntegralType
from spark_session import with_cpu_session, with_gpu_session, with_spark_session, is_before_spark_320, is_before_spark_330, is_databricks91_or_later
from spark_session import with_cpu_session, with_gpu_session, with_spark_session, is_before_spark_320, is_before_spark_330, is_databricks91_or_later, is_spark_330_or_later
import pyspark.sql.functions as f
from datetime import timedelta

Expand Down Expand Up @@ -283,11 +283,17 @@ def test_mod_pmod_long_min_value():
'cast(-12 as {}) % cast(0 as {})'], ids=idfn)
def test_mod_pmod_by_zero(data_gen, overflow_exp):
string_type = to_cast_string(data_gen.data_type)
# spark 31X: throws java.lang.ArithmeticException
# spark 32X: throws either java.lang.ArithmeticException or org.apache.spark.SparkArithmeticException
# spark 33X: throws org.apache.spark.SparkArithmeticException
exception_str = "java.lang.ArithmeticException" if is_before_spark_320() else \
"org.apache.spark.SparkArithmeticException" if is_spark_330_or_later() else \
"ArithmeticException"
assert_gpu_and_cpu_error(
lambda spark : unary_op_df(spark, data_gen).selectExpr(
overflow_exp.format(string_type, string_type)).collect(),
ansi_enabled_conf,
"java.lang.ArithmeticException" if is_before_spark_320() else "org.apache.spark.SparkArithmeticException")
exception_str)

@pytest.mark.parametrize('data_gen', _arith_data_gens_no_neg_scale, ids=idfn)
def test_mod_pmod_by_zero_not_ansi(data_gen):
Expand Down