Skip to content

Commit 113825b

Browse files
committed
Actually run spark tests
1 parent a7dac01 commit 113825b

File tree

4 files changed

+11
-2
lines changed

4 files changed

+11
-2
lines changed

.circleci/install-prerequisites.sh

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -34,4 +34,9 @@ echo "Installing OS-level dependencies: $ALL_DEPENDENCIES"
3434

3535
sudo apt-get clean && sudo apt-get -y update && sudo ACCEPT_EULA='Y' apt-get -y install $ALL_DEPENDENCIES
3636

37+
if [ "$ENGINE" == "spark" ]; then
38+
echo "Using Java version for spark:"
39+
java -version
40+
fi
41+
3742
echo "All done"

Makefile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -188,7 +188,7 @@ postgres-test: engine-postgres-up
188188
pytest -n auto -m "postgres" --retries 3 --junitxml=test-results/junit-postgres.xml
189189

190190
spark-test: engine-spark-up
191-
pytest -n auto -m "spark" --retries 3 --junitxml=test-results/junit-spark.xml
191+
pytest -n auto -m "spark" --retries 3 --junitxml=test-results/junit-spark.xml && pytest -n auto -m "pyspark" --retries 3 --junitxml=test-results/junit-pyspark.xml
192192

193193
trino-test: engine-trino-up
194194
pytest -n auto -m "trino" --retries 3 --junitxml=test-results/junit-trino.xml

pyproject.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -264,6 +264,7 @@ markers = [
264264
"redshift: test for Redshift",
265265
"snowflake: test for Snowflake",
266266
"spark: test for Spark",
267+
"pyspark: test for PySpark that needs to run separately from the other spark tests",
267268
"trino: test for Trino (all connectors)",
268269
"risingwave: test for Risingwave",
269270

tests/engines/spark/test_db_api.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,10 @@
44
from sqlmesh.engines.spark.db_api import errors
55
from sqlmesh.engines.spark.db_api import spark_session as spark_session_db
66

7-
pytestmark = [pytest.mark.slow, pytest.mark.spark]
7+
# note: this is deliberately not marked with 'spark' so that it
8+
# can run separately from the spark integration tests.
9+
# running them at the same time mutates some global state in the SparkSession, which breaks these tests
10+
pytestmark = [pytest.mark.slow, pytest.mark.pyspark]
811

912

1013
def test_spark_session_cursor(spark_session: SparkSession):

0 commit comments

Comments
 (0)