Commit 19fa5c3

Revert "Revert "Unpin pyspark"" (mlflow#10148)

Authored by harupy and B-Step62.
Signed-off-by: harupy <[email protected]>
Signed-off-by: B-Step62 <[email protected]>
Co-authored-by: Yuki Watanabe <[email protected]>

1 parent: 87f1abb

File tree: 10 files changed (+11, -17 lines)

.github/actions/py-cache-key/action.yml (1 addition, 1 deletion)

@@ -22,5 +22,5 @@ runs:
       # Refresh cache daily
       DATE=$(date -u "+%Y%m%d")
       # Change this value to force a cache refresh
-      N=0
+      N=1
       echo "value=$RUNNER_IMAGE-$PYTHON_VERSION-$DATE-$REQUIREMENTS_HASH-$N" >> $GITHUB_OUTPUT

.github/workflows/master.yml (1 addition, 4 deletions)

@@ -109,8 +109,6 @@ jobs:
           python -m venv .venv
           source .venv/bin/activate
           source ./dev/install-common-deps.sh --ml
-          # pyspark 3.5 is incompatible with delta 2.4
-          pip install 'pyspark<3.5'
       - uses: ./.github/actions/pipdeptree
       - name: Import check
         run: |
@@ -353,8 +351,7 @@ jobs:
           pip install -r requirements/test-requirements.txt
           pip install --no-dependencies tests/resources/mlflow-test-plugin
           pip install -e .[extras]
-          # pyspark 3.5 is incompatible with delta 2.4
-          pip install 'pyspark<3.5'
+          pip install pyspark
           pip install mleap
           # Install Hugging Face datasets to test Hugging Face usage with MLflow dataset tracking
           pip install datasets
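The pin could be dropped because Delta Lake 3.0 supports Spark 3.5, while delta 2.4 only supports up to Spark 3.4 (the reason the pin existed). Below is a minimal sketch, not part of this commit, of a probe a CI step could run to assert that the installed pair is compatible; the packaging dependency and the delta-spark distribution name are assumptions about the environment.

    # Hypothetical compatibility probe; assumes pyspark and delta-spark are installed.
    from importlib.metadata import version

    from packaging.version import Version

    pyspark_ver = Version(version("pyspark"))
    delta_ver = Version(version("delta-spark"))

    # Delta Lake 2.x only supports Spark <= 3.4; Spark 3.5 needs Delta Lake >= 3.0.
    if pyspark_ver >= Version("3.5"):
        assert delta_ver >= Version("3.0"), (
            f"pyspark {pyspark_ver} needs delta-spark >= 3.0, found {delta_ver}"
        )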

.github/workflows/recipe.yml (2 additions, 5 deletions)

@@ -42,9 +42,7 @@ jobs:
       - name: Install dependencies
         run: |
           source ./dev/install-common-deps.sh
-          pip install -e .
-          # pyspark 3.5 is incompatible with delta 2.4
-          pip install 'pyspark<3.5'
+          pip install pyspark
       - name: Run tests
         run: |
           pytest tests/recipes
@@ -70,8 +68,7 @@ jobs:
           pip install -r requirements/test-requirements.txt
           pip install --no-dependencies tests/resources/mlflow-test-plugin
           pip install -e .
-          # pyspark 3.5 is incompatible with delta 2.4
-          pip install 'pyspark<3.5'
+          pip install pyspark
           # TODO: Importing datasets in a pandas UDF (created by mlflow.pyfunc.spark_udf) crashes
           # the Python worker. To avoid this, uninstall `datasets`. This is a temporary workaround.
           pip uninstall -y datasets

mlflow/utils/_spark_utils.py (1 addition, 1 deletion)

@@ -46,7 +46,7 @@ def _create_local_spark_session_for_recipes():
     _prepare_subprocess_environ_for_creating_local_spark_session()
     return (
         SparkSession.builder.master("local[*]")
-        .config("spark.jars.packages", "io.delta:delta-core_2.12:2.4.0")
+        .config("spark.jars.packages", "io.delta:delta-spark_2.12:3.0.0")
         .config("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")
         .config(
             "spark.sql.catalog.spark_catalog", "org.apache.spark.sql.delta.catalog.DeltaCatalog"

tests/data/test_delta_dataset_source.py (1 addition, 1 deletion)

@@ -14,7 +14,7 @@ def spark_session():
 
     with (
         SparkSession.builder.master("local[*]")
-        .config("spark.jars.packages", "io.delta:delta-core_2.12:2.4.0")
+        .config("spark.jars.packages", "io.delta:delta-spark_2.12:3.0.0")
         .config("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")
         .config(
             "spark.sql.catalog.spark_catalog", "org.apache.spark.sql.delta.catalog.DeltaCatalog"

tests/data/test_pandas_dataset.py (1 addition, 1 deletion)

@@ -24,7 +24,7 @@ def spark_session():
 
     with (
         SparkSession.builder.master("local[*]")
-        .config("spark.jars.packages", "io.delta:delta-core_2.12:2.4.0")
+        .config("spark.jars.packages", "io.delta:delta-spark_2.12:3.0.0")
         .config("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")
         .config(
             "spark.sql.catalog.spark_catalog", "org.apache.spark.sql.delta.catalog.DeltaCatalog"

tests/data/test_spark_dataset.py (1 addition, 1 deletion)

@@ -21,7 +21,7 @@ def spark_session(tmp_path):
 
     with (
         SparkSession.builder.master("local[*]")
-        .config("spark.jars.packages", "io.delta:delta-core_2.12:2.4.0")
+        .config("spark.jars.packages", "io.delta:delta-spark_2.12:3.0.0")
         .config("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")
         .config(
             "spark.sql.catalog.spark_catalog", "org.apache.spark.sql.delta.catalog.DeltaCatalog"

tests/data/test_spark_dataset_source.py (1 addition, 1 deletion)

@@ -14,7 +14,7 @@ def spark_session():
 
     with (
         SparkSession.builder.master("local[*]")
-        .config("spark.jars.packages", "io.delta:delta-core_2.12:2.4.0")
+        .config("spark.jars.packages", "io.delta:delta-spark_2.12:3.0.0")
         .config("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")
         .config(
             "spark.sql.catalog.spark_catalog", "org.apache.spark.sql.delta.catalog.DeltaCatalog"

tests/recipes/test_ingest_step.py (1 addition, 1 deletion)

@@ -35,7 +35,7 @@ def spark_session():
     with tempfile.TemporaryDirectory() as tmpdir:
         with (
             SparkSession.builder.master("local[*]")
-            .config("spark.jars.packages", "io.delta:delta-core_2.12:2.4.0")
+            .config("spark.jars.packages", "io.delta:delta-spark_2.12:3.0.0")
             .config("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")
             .config(
                 "spark.sql.catalog.spark_catalog", "org.apache.spark.sql.delta.catalog.DeltaCatalog"

tests/recipes/test_predict_step.py (1 addition, 1 deletion)

@@ -28,7 +28,7 @@ def spark_session():
     with tempfile.TemporaryDirectory() as tmpdir:
         with (
            SparkSession.builder.master("local[*]")
-            .config("spark.jars.packages", "io.delta:delta-core_2.12:2.4.0")
+            .config("spark.jars.packages", "io.delta:delta-spark_2.12:3.0.0")
             .config("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")
             .config(
                 "spark.sql.catalog.spark_catalog", "org.apache.spark.sql.delta.catalog.DeltaCatalog"
