Skip to content

Commit ee2cd83

Browse files
authored
chore: reorganize test marks (#1933)
* chore: reorganize test marks * engine integration test fixes * engine integration test fixes * change unit/integration to fast/slow * fix engine marks
1 parent cf9c097 commit ee2cd83

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

49 files changed

+367
-205
lines changed

.circleci/continue_config.yml

Lines changed: 21 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@ commands:
3737
- run: circleci-agent step halt
3838

3939
jobs:
40-
style_and_unit_tests:
40+
style_and_slow_tests:
4141
parameters:
4242
python_version:
4343
type: string
@@ -57,13 +57,13 @@ jobs:
5757
name: Run linters and code style checks
5858
command: make py-style
5959
- run:
60-
name: Run unit tests
61-
command: make unit-test
60+
name: Run slow tests
61+
command: make slow-test
6262
- run:
6363
name: Run doc tests
6464
command: make doc-test
6565

66-
style_and_unit_tests_pydantic_v1:
66+
style_and_slow_tests_pydantic_v1:
6767
docker:
6868
- image: cimg/python:3.10
6969
resource_class: small
@@ -83,8 +83,8 @@ jobs:
8383
name: Run linters and code style checks
8484
command: make py-style
8585
- run:
86-
name: Run unit tests
87-
command: make unit-test
86+
name: Run slow tests
87+
command: make slow-test
8888

8989
ui_style:
9090
docker:
@@ -132,21 +132,7 @@ jobs:
132132
name: Run tests
133133
command: npm --prefix web/client run test
134134

135-
core_integration_tests:
136-
docker:
137-
- image: cimg/python:3.7
138-
resource_class: small
139-
steps:
140-
- halt_unless_core
141-
- checkout
142-
- run:
143-
name: Install dependencies
144-
command: make install-dev
145-
- run:
146-
name: Run Core integration tests
147-
command: make core-it-test
148-
149-
airflow_integration_tests:
135+
airflow_docker_tests:
150136
machine:
151137
image: ubuntu-2204:2022.10.2
152138
docker_layer_caching: true
@@ -160,8 +146,8 @@ jobs:
160146
name: Install ruamel.yaml
161147
command: pip3 install ruamel.yaml==0.16.0
162148
- run:
163-
name: Run Airflow integration tests
164-
command: make airflow-it-test-docker-with-env
149+
name: Run Airflow slow tests
150+
command: make airflow-docker-test-with-env
165151
no_output_timeout: 15m
166152
- run:
167153
name: Collect Airflow logs
@@ -173,7 +159,7 @@ jobs:
173159
- store_artifacts:
174160
path: /tmp/airflow_logs
175161

176-
engine_adapter_integration_local_only_tests:
162+
engine_adapter_docker_tests:
177163
machine:
178164
image: ubuntu-2204:2022.10.2
179165
docker_layer_caching: true
@@ -185,39 +171,38 @@ jobs:
185171
command: sudo apt-get update && sudo apt-get install libpq-dev
186172
- run:
187173
name: Install dependencies
188-
command: make install-engine-integration
174+
command: make install-engine-test
189175
- run:
190-
name: Bring up MySQL, Postgres, and MSSQL
191-
command: make core_engine_it_test_docker
176+
name: Bring up Dockerized Engines
177+
command: make engine-up
192178
- run:
193179
name: Make sure DBs are ready
194180
command: sleep 60
195181
- run:
196182
name: Run tests
197-
command: make core_engine_it_test_local_only
183+
command: make engine-docker-test
198184
no_output_timeout: 15m
199185

200186
workflows:
201187
main_pr:
202188
jobs:
203-
- style_and_unit_tests:
189+
- style_and_slow_tests:
204190
matrix:
205191
parameters:
206192
python_version:
207193
["3.7", "3.8", "3.9", "3.10", "3.11"]
208-
- style_and_unit_tests_pydantic_v1
209-
- core_integration_tests
210-
- airflow_integration_tests:
194+
- style_and_slow_tests_pydantic_v1
195+
- airflow_docker_tests:
211196
requires:
212-
- style_and_unit_tests
197+
- style_and_slow_tests
213198
filters:
214199
branches:
215200
only:
216201
- main
217-
- engine_adapter_integration_local_only_tests:
218-
context: engine_adapter_integration
202+
- engine_adapter_docker_tests:
203+
context: engine_adapter_slow
219204
requires:
220-
- style_and_unit_tests
205+
- style_and_slow_tests
221206
filters:
222207
branches:
223208
only:

Makefile

Lines changed: 94 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
install-dev:
44
pip3 install -e ".[dev,web,slack]"
55

6-
install-engine-integration:
6+
install-engine-test:
77
pip3 install -e ".[dev,web,slack,mysql,postgres,databricks,redshift,bigquery,snowflake,trino,mssql]"
88

99
install-pre-commit:
@@ -18,32 +18,9 @@ py-style:
1818
ui-style:
1919
SKIP=autoflake,isort,black,mypy pre-commit run --all-files
2020

21-
unit-test:
22-
pytest -m "not integration"
23-
2421
doc-test:
2522
PYTEST_PLUGINS=tests.common_fixtures pytest --doctest-modules sqlmesh/core sqlmesh/utils
2623

27-
core-it-test:
28-
pytest -m "core_integration"
29-
30-
core_engine_it_test:
31-
pytest -m "engine_integration"
32-
33-
core_engine_it_test_local_only:
34-
pytest -m "engine_integration_local"
35-
36-
core_engine_it_test_docker:
37-
docker-compose -f ./tests/core/engine_adapter/docker-compose.yaml up -d
38-
39-
engine_it_test: core_engine_it_test_docker core_engine_it_test
40-
41-
it-test: core-it-test airflow-it-test-with-env
42-
43-
it-test-docker: core-it-test airflow-it-test-docker-with-env
44-
45-
test: unit-test doc-test it-test
46-
4724
package:
4825
pip3 install wheel && python3 setup.py sdist bdist_wheel
4926

@@ -71,17 +48,6 @@ airflow-psql:
7148
airflow-spark-sql:
7249
make -C ./examples/airflow spark-sql
7350

74-
airflow-it-test:
75-
export AIRFLOW__DATABASE__SQL_ALCHEMY_CONN=postgresql+psycopg2://airflow:airflow@localhost/airflow && \
76-
pytest -m "airflow_integration"
77-
78-
airflow-it-test-docker:
79-
make -C ./examples/airflow it-test-docker
80-
81-
airflow-it-test-with-env: develop airflow-clean airflow-init airflow-run airflow-it-test airflow-stop
82-
83-
airflow-it-test-docker-with-env: develop airflow-clean airflow-init airflow-run airflow-it-test-docker airflow-stop
84-
8551
docs-serve:
8652
mkdocs serve
8753

@@ -106,4 +72,96 @@ clean-build:
10672
dev-publish: ui-build clean-build publish
10773

10874
jupyter-example:
109-
jupyter lab tests/integrations/jupyter/example_outputs.ipynb
75+
jupyter lab tests/slows/jupyter/example_outputs.ipynb
76+
77+
engine-up:
78+
docker-compose -f ./tests/core/engine_adapter/docker-compose.yaml up -d
79+
80+
engine-down:
81+
docker-compose -f ./tests/core/engine_adapter/docker-compose.yaml down
82+
83+
fast-test:
84+
pytest -m "fast"
85+
86+
slow-test:
87+
pytest -m "fast or slow"
88+
89+
core-fast-test:
90+
pytest -m "fast and not web and not github and not dbt and not airflow and not jupyter"
91+
92+
core-slow-test:
93+
pytest -m "(fast or slow) and not web and not github and not dbt and not airflow and not jupyter"
94+
95+
airflow-fast-test:
96+
pytest -m "fast and airflow"
97+
98+
airflow-test:
99+
pytest -m "(fast or slow) and airflow"
100+
101+
airflow-local-test:
102+
export AIRFLOW__DATABASE__SQL_ALCHEMY_CONN=postgresql+psycopg2://airflow:airflow@localhost/airflow && \
103+
pytest -m "docker and airflow"
104+
105+
airflow-docker-test:
106+
make -C ./examples/airflow docker-test
107+
108+
airflow-local-test-with-env: develop airflow-clean airflow-init airflow-run airflow-local-test airflow-stop
109+
110+
airflow-docker-test-with-env: develop airflow-clean airflow-init airflow-run airflow-docker-test airflow-stop
111+
112+
engine-slow-test:
113+
pytest -m "(fast or slow) and engine"
114+
115+
engine-docker-test:
116+
pytest -m "docker and engine"
117+
118+
engine-remote-test:
119+
pytest -m "remote and engine"
120+
121+
engine-test:
122+
pytest -m "engine"
123+
124+
dbt-test:
125+
pytest -m "dbt"
126+
127+
github-test:
128+
pytest -m "github"
129+
130+
jupyter-test:
131+
pytest -m "jupyter"
132+
133+
web-test:
134+
pytest -m "web"
135+
136+
bigquery-test:
137+
pytest -m "bigquery"
138+
139+
databricks-test:
140+
pytest -m "databricks"
141+
142+
duckdb-test:
143+
pytest -m "duckdb"
144+
145+
mssql-test:
146+
pytest -m "mssql"
147+
148+
mysql-test:
149+
pytest -m "mysql"
150+
151+
postgres-test:
152+
pytest -m "postgres"
153+
154+
redshift-test:
155+
pytest -m "redshift"
156+
157+
snowflake-test:
158+
pytest -m "snowflake"
159+
160+
spark-test:
161+
pytest -m "spark"
162+
163+
spark-pyspark-test:
164+
pytest -m "spark_pyspark"
165+
166+
trino-test:
167+
pytest -m "trino"

examples/airflow/Makefile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -52,5 +52,5 @@ psql:
5252
spark-sql:
5353
docker exec -it airflow-airflow-worker-1 spark-sql
5454

55-
it-test-docker: decorate-docker-compose
55+
docker-test: decorate-docker-compose
5656
docker-compose up --exit-code-from sqlmesh-tests sqlmesh-tests

examples/airflow/docker_compose_decorator.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -73,7 +73,7 @@
7373
"entrypoint": "/bin/bash",
7474
"command": [
7575
"-c",
76-
"make install-dev && pytest -m 'airflow_integration'",
76+
"make install-dev && pytest -m 'airflow and docker'",
7777
],
7878
"image": "airflow-sqlmesh",
7979
"user": "airflow",

pytest.ini

Lines changed: 28 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,33 @@
11
[pytest]
22
markers =
3-
integration: mark test as an integration test which can take longer to run and require an external environment
4-
core_integration: mark test as an integration test that does not require an external environment
5-
airflow_integration: mark test as needing the Airflow cluster to run successfully
6-
spark: mark test as requiring the PySpark dependency
7-
airflow: mark test as requiring the Airflow dependency
8-
engine_integration: Engine adapter tests that require external services
9-
engine_integration_local: Engine adapter tests that require local Docker containers
3+
# Test Type Markers
4+
# Tests are ordered from fastest to slowest
5+
fast: fast tests (automatically applied if no type markers)
6+
slow: slow tests that typically involve interacting with a local DB (like DuckDB)
7+
docker: test that involves interacting with a Docker container
8+
remote: test that involves interacting with a remote DB
9+
10+
# Test Domain Markers
11+
# default: core functionality
12+
airflow: test for Airflow scheduler
13+
cli: test for CLI
14+
dbt: test for dbt adapter
15+
github: test for Github CI/CD bot
16+
jupyter: tests for Jupyter integration
17+
web: tests for web UI
18+
spark_pyspark: test for Spark with PySpark dependency
19+
# Engine Adapters
20+
engine: test all engine adapters
21+
bigquery: test for BigQuery
22+
databricks: test for Databricks
23+
duckdb: test for DuckDB
24+
mssql: test for MSSQL
25+
mysql: test for MySQL
26+
postgres: test for Postgres
27+
redshift: test for Redshift
28+
snowflake: test for Snowflake
29+
spark: test for Spark
30+
trino: test for Trino
1031

1132
# Set this to True to enable logging during tests
1233
log_cli = False

setup.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -101,9 +101,10 @@
101101
"tenacity==8.1.0",
102102
"types-croniter",
103103
"types-dateparser",
104-
"typing-extensions",
104+
"types-python-dateutil",
105105
"types-pytz",
106106
"types-requests==2.28.8",
107+
"typing-extensions",
107108
],
108109
"dbt": [
109110
"dbt-core<1.5.0",

tests/conftest.py

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -133,6 +133,17 @@ def validate(
133133
raise NotImplementedError(f"Unknown model_name: {model_name}")
134134

135135

136+
def pytest_collection_modifyitems(items, *args, **kwargs):
137+
test_type_markers = {"fast", "slow", "docker", "remote"}
138+
for item in items:
139+
for marker in item.iter_markers():
140+
if marker.name in test_type_markers:
141+
break
142+
else:
143+
# if no test type marker is found, assume fast test
144+
item.add_marker("fast")
145+
146+
136147
# Ignore all local config files
137148
@pytest.fixture(scope="session", autouse=True)
138149
def ignore_local_config_files():

tests/core/engine_adapter/test_base.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,8 @@
1818
from sqlmesh.utils.errors import UnsupportedCatalogOperationError
1919
from tests.core.engine_adapter import to_sql_calls
2020

21+
pytestmark = pytest.mark.engine
22+
2123

2224
def test_create_view(make_mocked_engine_adapter: t.Callable):
2325
adapter = make_mocked_engine_adapter(EngineAdapter)

tests/core/engine_adapter/test_base_postgres.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,10 +2,13 @@
22
import typing as t
33
from unittest.mock import call
44

5+
import pytest
56
from sqlglot import exp, parse_one
67

78
from sqlmesh.core.engine_adapter.base_postgres import BasePostgresEngineAdapter
89

10+
pytestmark = [pytest.mark.engine, pytest.mark.postgres, pytest.mark.redshift]
11+
912

1013
def test_columns(make_mocked_engine_adapter: t.Callable):
1114
adapter = make_mocked_engine_adapter(BasePostgresEngineAdapter)

0 commit comments

Comments (0)