|
43 | 43 |
|
44 | 44 | logger = logging.getLogger(__name__) |
45 | 45 |
|
# Engines that support efficient row-level operations and are therefore
# recommended backends for storing SQLMesh state.
RECOMMENDED_STATE_SYNC_ENGINES = {
    "azuresql",
    "gcp_postgres",
    "mssql",
    "mysql",
    "postgres",
}
47 | 53 | FORBIDDEN_STATE_SYNC_ENGINES = { |
48 | 54 | # Do not support row-level operations |
49 | 55 | "spark", |
@@ -1684,6 +1690,55 @@ def _extra_engine_config(self) -> t.Dict[str, t.Any]: |
1684 | 1690 | return {"catalog_support": CatalogSupport.SINGLE_CATALOG_ONLY} |
1685 | 1691 |
|
1686 | 1692 |
|
class FabricConnectionConfig(MSSQLConnectionConfig):
    """
    Fabric Connection Configuration.

    Inherits most settings from MSSQLConnectionConfig and sets the type to
    'fabric'. It is recommended to use the 'pyodbc' driver for Fabric.
    """

    type_: t.Literal["fabric"] = Field(alias="type", default="fabric")  # type: ignore
    DIALECT: t.ClassVar[t.Literal["fabric"]] = "fabric"  # type: ignore
    DISPLAY_NAME: t.ClassVar[t.Literal["Fabric"]] = "Fabric"  # type: ignore
    DISPLAY_ORDER: t.ClassVar[t.Literal[17]] = 17  # type: ignore
    # Fabric connections are restricted to the pyodbc driver.
    driver: t.Literal["pyodbc"] = "pyodbc"
    workspace_id: str
    tenant_id: str
    autocommit: t.Optional[bool] = True

    @property
    def _engine_adapter(self) -> t.Type[EngineAdapter]:
        # Imported lazily to avoid pulling in the adapter (and its
        # dependencies) unless a Fabric connection is actually configured.
        from sqlmesh.core.engine_adapter.fabric import FabricEngineAdapter

        return FabricEngineAdapter

    @property
    def _connection_factory(self) -> t.Callable:
        """Wrap the base factory so callers can switch catalogs per-connection.

        The returned callable accepts an optional ``target_catalog`` which is
        routed to the ``database`` connection argument, falling back to the
        configured ``self.database``.
        """
        base_factory = super()._connection_factory

        def create_fabric_connection(
            target_catalog: t.Optional[str] = None, *args: t.Any, **kwargs: t.Any
        ) -> t.Any:
            # Fix: this returns a live connection object produced by the base
            # factory, not a callable — the original `-> t.Callable` annotation
            # was incorrect.
            kwargs["database"] = target_catalog or self.database
            return base_factory(*args, **kwargs)

        return create_fabric_connection

    @property
    def _extra_engine_config(self) -> t.Dict[str, t.Any]:
        return {
            "database": self.database,
            # More operations than not require a specific catalog to be already
            # active — in particular create/drop view, create/drop schema and
            # querying information_schema.
            "catalog_support": CatalogSupport.REQUIRES_SET_CATALOG,
            "workspace_id": self.workspace_id,
            "tenant_id": self.tenant_id,
            "user": self.user,
            "password": self.password,
        }
1687 | 1742 | class SparkConnectionConfig(ConnectionConfig): |
1688 | 1743 | """ |
1689 | 1744 | Vanilla Spark Connection Configuration. Use `DatabricksConnectionConfig` for Databricks. |
|
0 commit comments