Skip to content

Commit 0b4ee9e

Browse files
authored
Fix: Change gateway naming and render False values on project init (#3774)
1 parent 591645c commit 0b4ee9e

File tree

2 files changed: +18 lines, -18 lines

sqlmesh/cli/example_project.py

Lines changed: 6 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -48,12 +48,10 @@ def _gen_config(
4848
if isinstance(default_value, Enum):
4949
default_value = default_value.value
5050
elif not isinstance(default_value, PRIMITIVES):
51-
default_value = None
51+
default_value = ""
5252

5353
required = field.is_required() or field_name == "type"
54-
option_str = (
55-
f" {'# ' if not required else ''}{field_name}: {default_value or ''}\n"
56-
)
54+
option_str = f" {'# ' if not required else ''}{field_name}: {default_value}\n"
5755

5856
if required:
5957
required_fields.append(option_str)
@@ -74,22 +72,22 @@ def _gen_config(
7472

7573
default_configs = {
7674
ProjectTemplate.DEFAULT: f"""gateways:
77-
dev:
75+
{dialect}:
7876
connection:
7977
{connection_settings}
8078
81-
default_gateway: dev
79+
default_gateway: {dialect}
8280
8381
model_defaults:
8482
dialect: {dialect}
8583
start: {start or yesterday_ds()}
8684
""",
8785
ProjectTemplate.AIRFLOW: f"""gateways:
88-
dev:
86+
{dialect}:
8987
connection:
9088
{connection_settings}
9189
92-
default_gateway: dev
90+
default_gateway: {dialect}
9391
9492
default_scheduler:
9593
type: airflow

tests/cli/test_cli.py

Lines changed: 12 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -776,6 +776,7 @@ def test_dlt_pipeline_errors(runner, tmp_path):
776776
assert "Error: Could not attach to pipeline" in result.output
777777

778778

779+
@time_machine.travel(FREEZE_TIME)
779780
def test_plan_dlt(runner, tmp_path):
780781
root_dir = path.abspath(getcwd())
781782
pipeline_path = root_dir + "/examples/sushi_dlt/sushi_pipeline.py"
@@ -790,12 +791,12 @@ def test_plan_dlt(runner, tmp_path):
790791
init_example_project(tmp_path, "duckdb", ProjectTemplate.DLT, "sushi")
791792

792793
expected_config = f"""gateways:
793-
dev:
794+
duckdb:
794795
connection:
795796
type: duckdb
796797
database: {dataset_path}
797798
798-
default_gateway: dev
799+
default_gateway: duckdb
799800
800801
model_defaults:
801802
dialect: duckdb
@@ -947,20 +948,21 @@ def test_plan_dlt(runner, tmp_path):
947948
remove(dataset_path)
948949

949950

950-
def test_init_project_dialects(runner, tmp_path):
951+
@time_machine.travel(FREEZE_TIME)
952+
def test_init_project_dialects(tmp_path):
951953
dialect_to_config = {
952-
"redshift": "# concurrent_tasks: 4\n # register_comments: True\n # pre_ping: \n # pretty_sql: \n # user: \n # password: \n # database: \n # host: \n # port: \n # source_address: \n # unix_sock: \n # ssl: \n # sslmode: \n # timeout: \n # tcp_keepalive: \n # application_name: \n # preferred_role: \n # principal_arn: \n # credentials_provider: \n # region: \n # cluster_identifier: \n # iam: \n # is_serverless: \n # serverless_acct_id: \n # serverless_work_group: ",
953-
"bigquery": "# concurrent_tasks: 1\n # register_comments: True\n # pre_ping: \n # pretty_sql: \n # method: oauth\n # project: \n # execution_project: \n # quota_project: \n # location: \n # keyfile: \n # keyfile_json: \n # token: \n # refresh_token: \n # client_id: \n # client_secret: \n # token_uri: \n # scopes: \n # job_creation_timeout_seconds: \n # job_execution_timeout_seconds: \n # job_retries: 1\n # job_retry_deadline_seconds: \n # priority: \n # maximum_bytes_billed: ",
954-
"snowflake": "account: \n # concurrent_tasks: 4\n # register_comments: True\n # pre_ping: \n # pretty_sql: \n # user: \n # password: \n # warehouse: \n # database: \n # role: \n # authenticator: \n # token: \n # application: Tobiko_SQLMesh\n # private_key: \n # private_key_path: \n # private_key_passphrase: \n # session_parameters: ",
955-
"databricks": "# concurrent_tasks: 1\n # register_comments: True\n # pre_ping: \n # pretty_sql: \n # server_hostname: \n # http_path: \n # access_token: \n # auth_type: \n # oauth_client_id: \n # oauth_client_secret: \n # catalog: \n # http_headers: \n # session_configuration: \n # databricks_connect_server_hostname: \n # databricks_connect_access_token: \n # databricks_connect_cluster_id: \n # databricks_connect_use_serverless: \n # force_databricks_connect: \n # disable_databricks_connect: \n # disable_spark_session: ",
956-
"postgres": "host: \n user: \n password: \n port: \n database: \n # concurrent_tasks: 4\n # register_comments: True\n # pre_ping: True\n # pretty_sql: \n # keepalives_idle: \n # connect_timeout: 10\n # role: \n # sslmode: ",
954+
"redshift": "# concurrent_tasks: 4\n # register_comments: True\n # pre_ping: False\n # pretty_sql: False\n # user: \n # password: \n # database: \n # host: \n # port: \n # source_address: \n # unix_sock: \n # ssl: \n # sslmode: \n # timeout: \n # tcp_keepalive: \n # application_name: \n # preferred_role: \n # principal_arn: \n # credentials_provider: \n # region: \n # cluster_identifier: \n # iam: \n # is_serverless: \n # serverless_acct_id: \n # serverless_work_group: ",
955+
"bigquery": "# concurrent_tasks: 1\n # register_comments: True\n # pre_ping: False\n # pretty_sql: False\n # method: oauth\n # project: \n # execution_project: \n # quota_project: \n # location: \n # keyfile: \n # keyfile_json: \n # token: \n # refresh_token: \n # client_id: \n # client_secret: \n # token_uri: \n # scopes: \n # job_creation_timeout_seconds: \n # job_execution_timeout_seconds: \n # job_retries: 1\n # job_retry_deadline_seconds: \n # priority: \n # maximum_bytes_billed: ",
956+
"snowflake": "account: \n # concurrent_tasks: 4\n # register_comments: True\n # pre_ping: False\n # pretty_sql: False\n # user: \n # password: \n # warehouse: \n # database: \n # role: \n # authenticator: \n # token: \n # application: Tobiko_SQLMesh\n # private_key: \n # private_key_path: \n # private_key_passphrase: \n # session_parameters: ",
957+
"databricks": "# concurrent_tasks: 1\n # register_comments: True\n # pre_ping: False\n # pretty_sql: False\n # server_hostname: \n # http_path: \n # access_token: \n # auth_type: \n # oauth_client_id: \n # oauth_client_secret: \n # catalog: \n # http_headers: \n # session_configuration: \n # databricks_connect_server_hostname: \n # databricks_connect_access_token: \n # databricks_connect_cluster_id: \n # databricks_connect_use_serverless: False\n # force_databricks_connect: False\n # disable_databricks_connect: False\n # disable_spark_session: False",
958+
"postgres": "host: \n user: \n password: \n port: \n database: \n # concurrent_tasks: 4\n # register_comments: True\n # pre_ping: True\n # pretty_sql: False\n # keepalives_idle: \n # connect_timeout: 10\n # role: \n # sslmode: ",
957959
}
958960

959961
for dialect, expected_config in dialect_to_config.items():
960962
init_example_project(tmp_path, dialect=dialect)
961963

962-
config_start = f"gateways:\n dev:\n connection:\n # For more information on configuring the connection to your execution engine, visit:\n # https://sqlmesh.readthedocs.io/en/stable/reference/configuration/#connections\n # https://sqlmesh.readthedocs.io/en/stable/integrations/engines/{dialect}/#connection-options\n type: {dialect}\n "
963-
config_end = f"\n\n\ndefault_gateway: dev\n\nmodel_defaults:\n dialect: {dialect}\n start: {yesterday_ds()}\n"
964+
config_start = f"gateways:\n {dialect}:\n connection:\n # For more information on configuring the connection to your execution engine, visit:\n # https://sqlmesh.readthedocs.io/en/stable/reference/configuration/#connections\n # https://sqlmesh.readthedocs.io/en/stable/integrations/engines/{dialect}/#connection-options\n type: {dialect}\n "
965+
config_end = f"\n\n\ndefault_gateway: {dialect}\n\nmodel_defaults:\n dialect: {dialect}\n start: {yesterday_ds()}\n"
964966

965967
with open(tmp_path / "config.yaml") as file:
966968
config = file.read()

Comments (0)