Skip to content

Commit 7a04275

Browse files
committed
feat: widen dbt-core compatibility range
1 parent 4d8e831 commit 7a04275

File tree

14 files changed

+176
-62
lines changed

14 files changed

+176
-62
lines changed

.github/workflows/pr.yaml

Lines changed: 35 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,8 @@ on:
88
concurrency:
99
group: 'pr-${{ github.event.pull_request.number }}'
1010
cancel-in-progress: true
11+
permissions:
12+
contents: read
1113
jobs:
1214
test-vscode:
1315
env:
@@ -66,3 +68,36 @@ jobs:
6668
name: playwright-report
6769
path: vscode/extension/playwright-report/
6870
retention-days: 30
71+
test-dbt-versions:
72+
runs-on: ubuntu-latest
73+
strategy:
74+
fail-fast: false
75+
matrix:
76+
dbt-version: ["1.3.0", "1.4.0", "1.5.0", "1.6.0", "1.7.0", "1.8.0", "1.9.0", "1.10.0"]
77+
steps:
78+
- uses: actions/checkout@v5
79+
- name: Set up Python
80+
uses: actions/setup-python@v5
81+
with:
82+
python-version: '3.10'
83+
- name: Install SQLMesh dev dependencies
84+
run: |
85+
python -m venv .venv
86+
source .venv/bin/activate
87+
make install-dev
88+
- name: Install dbt-core version
89+
run: |
90+
source .venv/bin/activate
91+
pip install "dbt-core~=${{ matrix.dbt-version }}"
92+
pip install "pydantic>=2.0.0" --force-reinstall
93+
- name: Run dbt tests
94+
# We can't run slow tests across all engines because those tests require DuckDB, and old
95+
# versions of dbt require a version of DuckDB that we no longer support
96+
run: |
97+
source .venv/bin/activate
98+
make dbt-fast-test
99+
- name: Test SQLMesh info in sushi_dbt
100+
working-directory: ./examples/sushi_dbt
101+
run: |
102+
source ../../.venv/bin/activate
103+
sqlmesh info --skip-connection

Makefile

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -93,6 +93,9 @@ engine-test:
9393
dbt-test:
9494
pytest -n auto -m "dbt and not cicdonly"
9595

96+
dbt-fast-test:
97+
pytest -n auto -m "dbt and fast"
98+
9699
github-test:
97100
pytest -n auto -m "github"
98101

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,7 @@ bigframes = ["bigframes>=1.32.0"]
5252
clickhouse = ["clickhouse-connect"]
5353
databricks = ["databricks-sql-connector[pyarrow]"]
5454
dev = [
55-
"agate==1.7.1",
55+
"agate",
5656
"beautifulsoup4",
5757
"clickhouse-connect",
5858
"cryptography",

sqlmesh/dbt/loader.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -183,8 +183,11 @@ def _load_projects(self) -> t.List[Project]:
183183

184184
self._projects.append(project)
185185

186-
if project.context.target.database != (self.context.default_catalog or ""):
187-
raise ConfigError("Project default catalog does not match context default catalog")
186+
context_default_catalog = self.context.default_catalog or ""
187+
if project.context.target.database != context_default_catalog:
188+
raise ConfigError(
189+
f"Project default catalog ('{project.context.target.database}') does not match context default catalog ('{context_default_catalog}')."
190+
)
188191
for path in project.project_files:
189192
self._track_file(path)
190193

sqlmesh/dbt/manifest.py

Lines changed: 23 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,12 @@
1515
from sqlmesh.utils.conversions import make_serializable
1616

1717
# Override the file name to prevent dbt commands from invalidating the cache.
18-
dbt_constants.PARTIAL_PARSE_FILE_NAME = "sqlmesh_partial_parse.msgpack"
18+
if hasattr(dbt_constants, "PARTIAL_PARSE_FILE_NAME"):
19+
dbt_constants.PARTIAL_PARSE_FILE_NAME = "sqlmesh_partial_parse.msgpack"
20+
else:
21+
from dbt.parser import manifest as dbt_manifest
22+
23+
dbt_manifest.PARTIAL_PARSE_FILE_NAME = "sqlmesh_partial_parse.msgpack"
1924

2025
import jinja2
2126
from dbt.adapters.factory import register_adapter, reset_adapters
@@ -377,11 +382,17 @@ def _load_on_run_start_end(self) -> None:
377382

378383
if "on-run-start" in node.tags:
379384
self._on_run_start_per_package[node.package_name][node_name] = HookConfig(
380-
sql=sql, index=node.index or 0, path=node_path, dependencies=dependencies
385+
sql=sql,
386+
index=getattr(node, "index", None) or 0,
387+
path=node_path,
388+
dependencies=dependencies,
381389
)
382390
else:
383391
self._on_run_end_per_package[node.package_name][node_name] = HookConfig(
384-
sql=sql, index=node.index or 0, path=node_path, dependencies=dependencies
392+
sql=sql,
393+
index=getattr(node, "index", None) or 0,
394+
path=node_path,
395+
dependencies=dependencies,
385396
)
386397

387398
@property
@@ -591,6 +602,9 @@ def _macro_references(
591602
manifest: Manifest, node: t.Union[ManifestNode, Macro]
592603
) -> t.Set[MacroReference]:
593604
result: t.Set[MacroReference] = set()
605+
if not hasattr(node, "depends_on"):
606+
return result
607+
594608
for macro_node_id in node.depends_on.macros:
595609
if not macro_node_id:
596610
continue
@@ -606,18 +620,20 @@ def _macro_references(
606620

607621
def _refs(node: ManifestNode) -> t.Set[str]:
608622
if DBT_VERSION >= (1, 5, 0):
609-
result = set()
623+
result: t.Set[str] = set()
624+
if not hasattr(node, "refs"):
625+
return result
610626
for r in node.refs:
611-
ref_name = f"{r.package}.{r.name}" if r.package else r.name
627+
ref_name = f"{r.package}.{r.name}" if r.package else r.name # type: ignore
612628
if getattr(r, "version", None):
613-
ref_name = f"{ref_name}_v{r.version}"
629+
ref_name = f"{ref_name}_v{r.version}" # type: ignore
614630
result.add(ref_name)
615631
return result
616632
return {".".join(r) for r in node.refs} # type: ignore
617633

618634

619635
def _sources(node: ManifestNode) -> t.Set[str]:
620-
return {".".join(s) for s in node.sources}
636+
return {".".join(s) for s in getattr(node, "sources", [])}
621637

622638

623639
def _model_node_id(model_name: str, package: str) -> str:

sqlmesh/dbt/relation.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
from sqlmesh.dbt.util import DBT_VERSION
22

33

4-
if DBT_VERSION < (1, 8, 0):
5-
from dbt.contracts.relation import * # type: ignore # noqa: F403
6-
else:
4+
if DBT_VERSION >= (1, 8, 0):
75
from dbt.adapters.contracts.relation import * # type: ignore # noqa: F403
6+
else:
7+
from dbt.contracts.relation import * # type: ignore # noqa: F403

sqlmesh/dbt/seed.py

Lines changed: 26 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -5,11 +5,13 @@
55

66
import agate
77

8-
try:
8+
from sqlmesh.dbt.util import DBT_VERSION
9+
10+
if DBT_VERSION >= (1, 8, 0):
911
from dbt_common.clients import agate_helper # type: ignore
1012

1113
SUPPORTS_DELIMITER = True
12-
except ImportError:
14+
else:
1315
from dbt.clients import agate_helper # type: ignore
1416

1517
SUPPORTS_DELIMITER = False
@@ -90,31 +92,33 @@ def to_sqlmesh(
9092
)
9193

9294

93-
class Integer(agate_helper.Integer):
94-
def cast(self, d: t.Any) -> t.Optional[int]:
95-
if isinstance(d, str):
96-
# The dbt's implementation doesn't support coercion of strings to integers.
97-
if d.strip().lower() in self.null_values:
98-
return None
99-
try:
100-
return int(d)
101-
except ValueError:
102-
raise agate.exceptions.CastError('Can not parse value "%s" as Integer.' % d)
103-
return super().cast(d)
104-
105-
def jsonify(self, d: t.Any) -> str:
106-
return d
107-
108-
109-
agate_helper.Integer = Integer # type: ignore
110-
111-
11295
AGATE_TYPE_MAPPING = {
113-
agate_helper.Integer: exp.DataType.build("int"),
11496
agate_helper.Number: exp.DataType.build("double"),
11597
agate_helper.ISODateTime: exp.DataType.build("datetime"),
11698
agate.Date: exp.DataType.build("date"),
11799
agate.DateTime: exp.DataType.build("datetime"),
118100
agate.Boolean: exp.DataType.build("boolean"),
119101
agate.Text: exp.DataType.build("text"),
120102
}
103+
104+
105+
if DBT_VERSION >= (1, 7, 0):
106+
107+
class Integer(agate_helper.Integer):
108+
def cast(self, d: t.Any) -> t.Optional[int]:
109+
if isinstance(d, str):
110+
# The dbt's implementation doesn't support coercion of strings to integers.
111+
if d.strip().lower() in self.null_values:
112+
return None
113+
try:
114+
return int(d)
115+
except ValueError:
116+
raise agate.exceptions.CastError('Can not parse value "%s" as Integer.' % d)
117+
return super().cast(d)
118+
119+
def jsonify(self, d: t.Any) -> str:
120+
return d
121+
122+
agate_helper.Integer = Integer # type: ignore
123+
124+
AGATE_TYPE_MAPPING[agate_helper.Integer] = exp.DataType.build("int")

sqlmesh/dbt/util.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -20,10 +20,10 @@ def _get_dbt_version() -> t.Tuple[int, int, int]:
2020

2121
DBT_VERSION = _get_dbt_version()
2222

23-
if DBT_VERSION < (1, 8, 0):
24-
from dbt.clients.agate_helper import table_from_data_flat, empty_table, as_matrix # type: ignore # noqa: F401
25-
else:
23+
if DBT_VERSION >= (1, 8, 0):
2624
from dbt_common.clients.agate_helper import table_from_data_flat, empty_table, as_matrix # type: ignore # noqa: F401
25+
else:
26+
from dbt.clients.agate_helper import table_from_data_flat, empty_table, as_matrix # type: ignore # noqa: F401
2727

2828

2929
def pandas_to_agate(df: pd.DataFrame) -> agate.Table:

tests/dbt/conftest.py

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
77
from sqlmesh.core.context import Context
88
from sqlmesh.dbt.context import DbtContext
99
from sqlmesh.dbt.project import Project
10+
from sqlmesh.dbt.target import PostgresConfig
1011

1112

1213
@pytest.fixture()
@@ -25,3 +26,16 @@ def render(value: str) -> str:
2526
return render
2627

2728
return create_renderer
29+
30+
31+
@pytest.fixture()
32+
def dbt_dummy_postgres_config() -> PostgresConfig:
33+
return PostgresConfig( # type: ignore
34+
name="postgres",
35+
host="host",
36+
user="user",
37+
password="password",
38+
dbname="dbname",
39+
port=5432,
40+
schema="schema",
41+
)

tests/dbt/test_adapter.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,7 @@
2222
pytestmark = pytest.mark.dbt
2323

2424

25+
@pytest.mark.slow
2526
def test_adapter_relation(sushi_test_project: Project, runtime_renderer: t.Callable):
2627
context = sushi_test_project.context
2728
assert context.target
@@ -95,6 +96,7 @@ def test_adapter_relation(sushi_test_project: Project, runtime_renderer: t.Calla
9596
assert engine_adapter.table_exists("foo.bar__backup")
9697

9798

99+
@pytest.mark.slow
98100
def test_bigquery_get_columns_in_relation(
99101
sushi_test_project: Project,
100102
runtime_renderer: t.Callable,
@@ -134,6 +136,7 @@ def test_bigquery_get_columns_in_relation(
134136

135137

136138
@pytest.mark.cicdonly
139+
@pytest.mark.slow
137140
def test_normalization(
138141
sushi_test_project: Project, runtime_renderer: t.Callable, mocker: MockerFixture
139142
):
@@ -231,6 +234,7 @@ def test_normalization(
231234
adapter_mock.drop_table.assert_has_calls([call(relation_bla_bob)])
232235

233236

237+
@pytest.mark.slow
234238
def test_adapter_dispatch(sushi_test_project: Project, runtime_renderer: t.Callable):
235239
context = sushi_test_project.context
236240
renderer = runtime_renderer(context)
@@ -243,6 +247,7 @@ def test_adapter_dispatch(sushi_test_project: Project, runtime_renderer: t.Calla
243247

244248

245249
@pytest.mark.parametrize("project_dialect", ["duckdb", "bigquery"])
250+
@pytest.mark.slow
246251
def test_adapter_map_snapshot_tables(
247252
sushi_test_project: Project,
248253
runtime_renderer: t.Callable,
@@ -319,6 +324,7 @@ def test_quote_as_configured():
319324
adapter.quote_as_configured("foo", "database") == "foo"
320325

321326

327+
@pytest.mark.slow
322328
def test_adapter_get_relation_normalization(
323329
sushi_test_project: Project, runtime_renderer: t.Callable
324330
):

0 commit comments

Comments
 (0)