|
8 | 8 |
|
9 | 9 | from sqlmesh.core import dialect as d |
10 | 10 | from sqlmesh.core.engine_adapter import DatabricksEngineAdapter |
11 | | -from sqlmesh.core.engine_adapter.shared import DataObject |
| 11 | +from sqlmesh.core.engine_adapter.shared import DataObject, DataObjectType |
12 | 12 | from sqlmesh.core.node import IntervalUnit |
13 | 13 | from tests.core.engine_adapter import to_sql_calls |
14 | 14 |
|
@@ -219,3 +219,73 @@ def test_create_table_clustered_by(mocker: MockFixture, make_mocked_engine_adapt |
219 | 219 | assert sql_calls == [ |
220 | 220 | "CREATE TABLE IF NOT EXISTS `test_table` (`cola` INT, `colb` STRING) CLUSTER BY (`cola`)", |
221 | 221 | ] |
| 222 | + |
| 223 | + |
def test_get_data_objects_distinguishes_view_types(mocker: MockFixture):
    """_get_data_objects should map Databricks ``table_type`` values onto
    DataObjectType, distinguishing plain views from materialized views.

    Databricks requires a DBSQL Serverless or Pro warehouse to create
    materialized views, which we do not have set up, so ``fetchdf`` is mocked
    to simulate the response we would expect from the metadata SQL query.
    """
    adapter = DatabricksEngineAdapter(lambda: None, default_catalog="test_catalog")

    # One row per object type the adapter must classify.
    mock_df = pd.DataFrame(
        [
            {
                "name": "regular_view",
                "schema": "test_schema",
                "catalog": "test_catalog",
                "type": "view",
            },
            {
                "name": "mat_view",
                "schema": "test_schema",
                "catalog": "test_catalog",
                "type": "materialized_view",
            },
            {
                "name": "regular_table",
                "schema": "test_schema",
                "catalog": "test_catalog",
                "type": "table",
            },
        ]
    )

    # Keep the mock handle so assertions target the mock directly (idiomatic
    # pytest-mock) instead of reaching back through the adapter attribute.
    fetchdf_mock = mocker.patch.object(adapter, "fetchdf", return_value=mock_df)

    data_objects = adapter._get_data_objects(
        schema_name=exp.Table(db="test_schema", catalog="test_catalog")
    )

    fetchdf_mock.assert_called_once()
    sql_query_exp = fetchdf_mock.call_args[0][0]

    # The metadata query itself must distinguish VIEW from MATERIALIZED_VIEW
    # rather than collapsing both into 'view'.
    sql_query = sql_query_exp.sql(dialect="databricks")
    assert (
        "CASE table_type WHEN 'VIEW' THEN 'view' WHEN 'MATERIALIZED_VIEW' THEN 'materialized_view' ELSE 'table' END AS type"
        in sql_query
    )

    objects_by_name = {obj.name: obj for obj in data_objects}
    assert objects_by_name["regular_view"].type == DataObjectType.VIEW
    assert objects_by_name["mat_view"].type == DataObjectType.MATERIALIZED_VIEW
    assert objects_by_name["regular_table"].type == DataObjectType.TABLE
| 273 | + |
| 274 | + |
def test_drop_data_object_materialized_view_calls_correct_drop(mocker: MockFixture):
    """Dropping a DataObject of type MATERIALIZED_VIEW must delegate to
    ``drop_view`` with ``materialized=True`` so the correct DROP statement
    is issued."""
    adapter = DatabricksEngineAdapter(lambda: None, default_catalog="test_catalog")

    # Patch before exercising the adapter so the real drop_view is never hit.
    drop_view_mock = mocker.patch.object(adapter, "drop_view")

    materialized_view = DataObject(
        catalog="test_catalog",
        schema="test_schema",
        name="test_mv",
        type=DataObjectType.MATERIALIZED_VIEW,
    )
    adapter.drop_data_object(materialized_view)

    # Ensure drop_view is called with materialized=True
    drop_view_mock.assert_called_once_with(
        materialized_view.to_table(), ignore_if_not_exists=True, materialized=True
    )
0 commit comments