Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .circleci/continue_config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -246,6 +246,7 @@ jobs:
echo "export SNOWFLAKE_DATABASE='$TEST_DB_NAME'" >> "$BASH_ENV"
echo "export DATABRICKS_CATALOG='$TEST_DB_NAME'" >> "$BASH_ENV"
echo "export REDSHIFT_DATABASE='$TEST_DB_NAME'" >> "$BASH_ENV"
echo "export GCP_POSTGRES_DATABASE='$TEST_DB_NAME'" >> "$BASH_ENV"
- run:
name: Create test database
command: ./.circleci/manage-test-db.sh << parameters.engine >> "$TEST_DB_NAME" up
Expand Down Expand Up @@ -303,6 +304,7 @@ workflows:
- bigquery
- clickhouse-cloud
- athena
- gcp-postgres
filters:
branches:
only:
Expand Down
26 changes: 26 additions & 0 deletions .circleci/manage-test-db.sh
Original file line number Diff line number Diff line change
Expand Up @@ -109,6 +109,32 @@ clickhouse-cloud_init() {
echo "Clickhouse Cloud instance $CLICKHOUSE_CLOUD_HOST is up and running"
}

# GCP Postgres
gcp-postgres_init() {
    # Download the Cloud SQL Auth Proxy and start it in the background so psql
    # can reach the instance on 127.0.0.1. -f makes curl fail on HTTP errors.
    curl -fsSL -o cloud-sql-proxy https://storage.googleapis.com/cloud-sql-connectors/cloud-sql-proxy/v2.18.0/cloud-sql-proxy.linux.amd64
    chmod +x cloud-sql-proxy

    # The keyfile is a service-account credential: restrict it to the owner
    # instead of leaving it world-readable in /tmp.
    echo "$GCP_POSTGRES_KEYFILE_JSON" > /tmp/keyfile.json
    chmod 600 /tmp/keyfile.json

    # Quote the connection string so an unexpected space/glob char cannot
    # split or expand the argument (ShellCheck SC2086).
    ./cloud-sql-proxy --credentials-file /tmp/keyfile.json "$GCP_POSTGRES_INSTANCE_CONNECTION_STRING" &

    # Poll for readiness instead of a blind 'sleep 5': a slow proxy start no
    # longer breaks the build, and a fast one no longer wastes wall time.
    for _ in $(seq 1 30); do
        if PGPASSWORD="$GCP_POSTGRES_PASSWORD" psql -h 127.0.0.1 -U "$GCP_POSTGRES_USER" -c "select 1" postgres >/dev/null 2>&1; then
            return 0
        fi
        sleep 1
    done
    echo "Cloud SQL Proxy did not become ready within 30s" >&2
    return 1
}

gcp-postgres_exec() {
    # Run a single SQL statement ($1) against the proxied instance's
    # maintenance database. Both env-var expansions are quoted so whitespace
    # in credentials cannot word-split the command (ShellCheck SC2086).
    PGPASSWORD="$GCP_POSTGRES_PASSWORD" psql -h 127.0.0.1 -U "$GCP_POSTGRES_USER" -c "$1" postgres
}

gcp-postgres_up() {
    # Create the per-run test database named by $1 on the Cloud SQL instance.
    local db_name="$1"
    gcp-postgres_exec "create database $db_name"
}

gcp-postgres_down() {
    # Tear down the per-run test database named by $1.
    local db_name="$1"
    gcp-postgres_exec "drop database $db_name"
}



INIT_FUNC="${ENGINE}_init"
UP_FUNC="${ENGINE}_up"
DOWN_FUNC="${ENGINE}_down"
Expand Down
3 changes: 3 additions & 0 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -173,6 +173,9 @@ clickhouse-cloud-test: guard-CLICKHOUSE_CLOUD_HOST guard-CLICKHOUSE_CLOUD_USERNA
athena-test: guard-AWS_ACCESS_KEY_ID guard-AWS_SECRET_ACCESS_KEY guard-ATHENA_S3_WAREHOUSE_LOCATION engine-athena-install
pytest -n auto -m "athena" --retries 3 --junitxml=test-results/junit-athena.xml

# Run the GCP Postgres integration suite. The guard-* prerequisites abort early
# when any of the four GCP_POSTGRES_* env vars is unset; the engine install
# target pulls in the gcp-postgres client dependencies first.
gcp-postgres-test: guard-GCP_POSTGRES_INSTANCE_CONNECTION_STRING guard-GCP_POSTGRES_USER guard-GCP_POSTGRES_PASSWORD guard-GCP_POSTGRES_KEYFILE_JSON engine-gcppostgres-install
	pytest -n auto -m "gcp_postgres" --retries 3 --junitxml=test-results/junit-gcp-postgres.xml

vscode_settings:
mkdir -p .vscode
cp -r ./tooling/vscode/*.json .vscode/
Expand Down
6 changes: 0 additions & 6 deletions sqlmesh/core/config/connection.py
Original file line number Diff line number Diff line change
Expand Up @@ -1210,12 +1210,6 @@ def _validate_auth_method(cls, data: t.Any) -> t.Any:
password = data.get("password")
enable_iam_auth = data.get("enable_iam_auth")

if password and enable_iam_auth:
raise ConfigError(
"Invalid GCP Postgres connection configuration - both password and"
" enable_iam_auth set. Use password when connecting to a postgres"
" user and enable_iam_auth 'True' when connecting to an IAM user."
)
if not password and not enable_iam_auth:
raise ConfigError(
"GCP Postgres connection configuration requires either password set"
Expand Down
1 change: 1 addition & 0 deletions tests/core/engine_adapter/integration/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,7 @@ def pytest_marks(self) -> t.List[MarkDecorator]:
IntegrationTestEngine("bigquery", native_dataframe_type="bigframe", cloud=True),
IntegrationTestEngine("databricks", native_dataframe_type="pyspark", cloud=True),
IntegrationTestEngine("snowflake", native_dataframe_type="snowpark", cloud=True),
IntegrationTestEngine("gcp_postgres", cloud=True),
]

ENGINES_BY_NAME = {e.engine: e for e in ENGINES}
Expand Down
11 changes: 11 additions & 0 deletions tests/core/engine_adapter/integration/config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -186,5 +186,16 @@ gateways:
state_connection:
type: duckdb

inttest_gcp_postgres:
connection:
type: gcp_postgres
instance_connection_string: {{ env_var("GCP_POSTGRES_INSTANCE_CONNECTION_STRING") }}
user: {{ env_var("GCP_POSTGRES_USER") }}
password: {{ env_var("GCP_POSTGRES_PASSWORD") }}
keyfile_json: {{ env_var("GCP_POSTGRES_KEYFILE_JSON", "") }}
db: {{ env_var("GCP_POSTGRES_DATABASE") }}
enable_iam_auth: true
check_import: false

model_defaults:
dialect: duckdb
6 changes: 5 additions & 1 deletion tests/core/engine_adapter/integration/test_integration.py
Original file line number Diff line number Diff line change
Expand Up @@ -1587,7 +1587,11 @@ def _normalize_snowflake(name: str, prefix_regex: str = "(sqlmesh__)(.*)"):
k: [_normalize_snowflake(name) for name in v] for k, v in object_names.items()
}

init_example_project(tmp_path, ctx.mark.split("_")[0], schema_name=schema_name)
if ctx.mark.startswith("gcp_postgres"):
engine_type = "gcp_postgres"
else:
engine_type = ctx.mark.split("_")[0]
init_example_project(tmp_path, engine_type, schema_name=schema_name)

config = load_config_from_paths(
Config,
Expand Down
Loading