|
2 | 2 |
|
3 | 3 | import datetime |
4 | 4 | import typing as t |
| 5 | +import logging |
5 | 6 |
|
6 | 7 | from sqlglot import exp |
7 | 8 | from sqlglot.errors import SqlglotError |
|
34 | 35 | from sqlmesh.core.audit.definition import ModelAudit |
35 | 36 | from sqlmesh.dbt.context import DbtContext |
36 | 37 |
|
# Module-level logger, named after this module per the standard logging convention.
logger = logging.getLogger(__name__)


# dbt incremental strategies, grouped by the SQLMesh incremental model kind they
# correspond to (grouping inferred from the constant names — confirm against the
# incremental-kind selection logic in this module).
# NOTE: set literals replace the redundant `set([...])` constructor calls.
INCREMENTAL_BY_TIME_STRATEGIES = {"delete+insert", "insert_overwrite", "microbatch"}
INCREMENTAL_BY_UNIQUE_KEY_STRATEGIES = {"merge"}
@@ -522,16 +525,21 @@ def to_sqlmesh( |
522 | 525 | partitioned_by.append(self._big_query_partition_by_expr(context)) |
523 | 526 | optional_kwargs["partitioned_by"] = partitioned_by |
524 | 527 |
|
525 | | - if self.cluster_by and not isinstance(kind, ViewKind): |
526 | | - clustered_by = [] |
527 | | - for c in self.cluster_by: |
528 | | - try: |
529 | | - clustered_by.append(d.parse_one(c, dialect=model_dialect)) |
530 | | - except SqlglotError as e: |
531 | | - raise ConfigError( |
532 | | - f"Failed to parse model '{self.canonical_name(context)}' cluster_by field '{c}' in '{self.path}': {e}" |
533 | | - ) from e |
534 | | - optional_kwargs["clustered_by"] = clustered_by |
| 528 | + if self.cluster_by: |
| 529 | + if isinstance(kind, ViewKind): |
| 530 | + logger.warning( |
| 531 | + f"Ignoring cluster_by config for model '{self.name}'; cluster_by is not supported for views." |
| 532 | + ) |
| 533 | + else: |
| 534 | + clustered_by = [] |
| 535 | + for c in self.cluster_by: |
| 536 | + try: |
| 537 | + clustered_by.append(d.parse_one(c, dialect=model_dialect)) |
| 538 | + except SqlglotError as e: |
| 539 | + raise ConfigError( |
| 540 | + f"Failed to parse model '{self.canonical_name(context)}' cluster_by field '{c}' in '{self.path}': {e}" |
| 541 | + ) from e |
| 542 | + optional_kwargs["clustered_by"] = clustered_by |
535 | 543 |
|
536 | 544 | model_kwargs = self.sqlmesh_model_kwargs(context) |
537 | 545 | if self.sql_header: |
|
0 commit comments