@@ -24,8 +24,8 @@ def trino_mocked_engine_adapter(
2424 def mock_catalog_type (catalog_name ):
2525 if "iceberg" in catalog_name :
2626 return "iceberg"
27- if "delta" in catalog_name :
28- return "delta"
27+ if "delta_lake" in catalog_name :
28+ return "delta_lake"
2929 return "hive"
3030
3131 mocker .patch (
@@ -50,7 +50,7 @@ def test_set_current_catalog(trino_mocked_engine_adapter: TrinoEngineAdapter):
5050 ]
5151
5252
53- @pytest .mark .parametrize ("storage_type" , ["iceberg" , "delta" ])
53+ @pytest .mark .parametrize ("storage_type" , ["iceberg" , "delta_lake" ])
5454def test_get_catalog_type (
5555 trino_mocked_engine_adapter : TrinoEngineAdapter , mocker : MockerFixture , storage_type : str
5656):
@@ -64,13 +64,14 @@ def test_get_catalog_type(
6464 assert adapter .get_catalog_type ("foo" ) == TrinoEngineAdapter .DEFAULT_CATALOG_TYPE
6565 assert adapter .get_catalog_type ("datalake_hive" ) == "hive"
6666 assert adapter .get_catalog_type ("datalake_iceberg" ) == "iceberg"
67- assert adapter .get_catalog_type ("datalake_delta" ) == "delta"
67+ assert adapter .get_catalog_type ("datalake_delta_lake" ) == "delta_lake"
6868
6969 mocker .patch (
7070 "sqlmesh.core.engine_adapter.trino.TrinoEngineAdapter.get_current_catalog" ,
7171 return_value = f"system_{ storage_type }" ,
7272 )
73- assert adapter .current_catalog_type == storage_type
73+ expected_current_type = storage_type
74+ assert adapter .current_catalog_type == expected_current_type
7475
7576
7677def test_get_catalog_type_cached (
@@ -103,7 +104,7 @@ def mock_fetchone(sql):
103104 assert fetchone_mock .call_count == 2
104105
105106
106- @pytest .mark .parametrize ("storage_type" , ["hive" , "delta" ])
107+ @pytest .mark .parametrize ("storage_type" , ["hive" , "delta_lake" ])
107108def test_partitioned_by_hive_delta (
108109 trino_mocked_engine_adapter : TrinoEngineAdapter , mocker : MockerFixture , storage_type : str
109110):
@@ -113,7 +114,8 @@ def test_partitioned_by_hive_delta(
113114 "sqlmesh.core.engine_adapter.trino.TrinoEngineAdapter.get_current_catalog" ,
114115 return_value = f"datalake_{ storage_type }" ,
115116 )
116- assert adapter .get_catalog_type (f"datalake_{ storage_type } " ) == storage_type
117+ expected_type = storage_type
118+ assert adapter .get_catalog_type (f"datalake_{ storage_type } " ) == expected_type
117119
118120 columns_to_types = {
119121 "cola" : exp .DataType .build ("INT" ),
@@ -314,7 +316,7 @@ def test_comments_hive(mocker: MockerFixture, make_mocked_engine_adapter: t.Call
314316 ]
315317
316318
317- @pytest .mark .parametrize ("storage_type" , ["iceberg" , "delta" ])
319+ @pytest .mark .parametrize ("storage_type" , ["iceberg" , "delta_lake" ])
318320def test_comments_iceberg_delta (
319321 mocker : MockerFixture , make_mocked_engine_adapter : t .Callable , storage_type : str
320322):
@@ -646,3 +648,40 @@ def test_session_authorization(trino_mocked_engine_adapter: TrinoEngineAdapter):
646648 "SELECT 1" ,
647649 "RESET SESSION AUTHORIZATION" ,
648650 ]
651+
652+
653+ @pytest .mark .parametrize (
654+ "catalog_name,expected_replace" ,
655+ [
656+ ("hive_catalog" , False ),
657+ ("iceberg_catalog" , True ),
658+ ("delta_catalog" , False ),
659+ ("acme_delta_lake" , True ),
660+ ("acme_iceberg" , True ),
661+ ("custom_delta_lake_something" , True ),
662+ ("my_iceberg_store" , True ),
663+ ("plain_catalog" , False ),
664+ ],
665+ )
666+ def test_replace_table_catalog_support (
667+ trino_mocked_engine_adapter : TrinoEngineAdapter , catalog_name , expected_replace
668+ ):
669+ adapter = trino_mocked_engine_adapter
670+
671+ adapter .replace_query (
672+ table_name = "." .join ([catalog_name , "schema" , "test_table" ]),
673+ query_or_df = parse_one ("SELECT 1 AS col" ),
674+ )
675+
676+ sql_calls = to_sql_calls (adapter )
677+ assert len (sql_calls ) == 1
678+ if expected_replace :
679+ assert (
680+ sql_calls [0 ]
681+ == f'CREATE OR REPLACE TABLE "{ catalog_name }"."schema"."test_table" AS SELECT 1 AS "col"'
682+ )
683+ else :
684+ assert (
685+ sql_calls [0 ]
686+ == f'CREATE TABLE IF NOT EXISTS "{ catalog_name }"."schema"."test_table" AS SELECT 1 AS "col"'
687+ )
0 commit comments