Skip to content
Merged
Show file tree
Hide file tree
Changes from 6 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 4 additions & 4 deletions .circleci/continue_config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -310,10 +310,10 @@ workflows:
- athena
- fabric
- gcp-postgres
filters:
branches:
only:
- main
#filters:
Copy link
Copy Markdown
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

TODO: revert

# branches:
# only:
# - main
- ui_style
- ui_test
- vscode_test
Expand Down
5 changes: 5 additions & 0 deletions .circleci/install-prerequisites.sh
Original file line number Diff line number Diff line change
Expand Up @@ -34,4 +34,9 @@ echo "Installing OS-level dependencies: $ALL_DEPENDENCIES"

sudo apt-get clean && sudo apt-get -y update && sudo ACCEPT_EULA='Y' apt-get -y install $ALL_DEPENDENCIES

if [ "$ENGINE" == "spark" ]; then
echo "Using Java version for spark:"
java -version
fi

echo "All done"
4 changes: 3 additions & 1 deletion .circleci/manage-test-db.sh
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,9 @@ databricks_init() {

# Note: the cluster doesn't need to be running to create / drop catalogs, but it does need to be running to run the integration tests
echo "Ensuring cluster is running"
databricks clusters start $CLUSTER_ID
# the || true is to prevent the following error from causing an abort:
# > Error: is in unexpected state Running.
databricks clusters start $CLUSTER_ID || true
}

databricks_up() {
Expand Down
34 changes: 17 additions & 17 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -138,7 +138,7 @@ dbt-test:
pytest -n auto -m "dbt and not cicdonly"

dbt-fast-test:
pytest -n auto -m "dbt and fast" --retries 3
pytest -n auto -m "dbt and fast" --reruns 3

github-test:
pytest -n auto -m "github"
Expand Down Expand Up @@ -173,58 +173,58 @@ engine-%-down:
##################

clickhouse-test: engine-clickhouse-up
pytest -n auto -m "clickhouse" --retries 3 --junitxml=test-results/junit-clickhouse.xml
pytest -n auto -m "clickhouse" --reruns 3 --junitxml=test-results/junit-clickhouse.xml

duckdb-test: engine-duckdb-install
pytest -n auto -m "duckdb" --retries 3 --junitxml=test-results/junit-duckdb.xml
pytest -n auto -m "duckdb" --reruns 3 --junitxml=test-results/junit-duckdb.xml

mssql-test: engine-mssql-up
pytest -n auto -m "mssql" --retries 3 --junitxml=test-results/junit-mssql.xml
pytest -n auto -m "mssql" --reruns 3 --junitxml=test-results/junit-mssql.xml

mysql-test: engine-mysql-up
pytest -n auto -m "mysql" --retries 3 --junitxml=test-results/junit-mysql.xml
pytest -n auto -m "mysql" --reruns 3 --junitxml=test-results/junit-mysql.xml

postgres-test: engine-postgres-up
pytest -n auto -m "postgres" --retries 3 --junitxml=test-results/junit-postgres.xml
pytest -n auto -m "postgres" --reruns 3 --junitxml=test-results/junit-postgres.xml

spark-test: engine-spark-up
pytest -n auto -m "spark" --retries 3 --junitxml=test-results/junit-spark.xml
pytest -n auto -m "spark" --reruns 3 --junitxml=test-results/junit-spark.xml && pytest -n auto -m "pyspark" --reruns 3 --junitxml=test-results/junit-pyspark.xml

trino-test: engine-trino-up
pytest -n auto -m "trino" --retries 3 --junitxml=test-results/junit-trino.xml
pytest -n auto -m "trino" --reruns 3 --junitxml=test-results/junit-trino.xml

risingwave-test: engine-risingwave-up
pytest -n auto -m "risingwave" --retries 3 --junitxml=test-results/junit-risingwave.xml
pytest -n auto -m "risingwave" --reruns 3 --junitxml=test-results/junit-risingwave.xml

#################
# Cloud Engines #
#################

snowflake-test: guard-SNOWFLAKE_ACCOUNT guard-SNOWFLAKE_WAREHOUSE guard-SNOWFLAKE_DATABASE guard-SNOWFLAKE_USER guard-SNOWFLAKE_PASSWORD engine-snowflake-install
pytest -n auto -m "snowflake" --retries 3 --junitxml=test-results/junit-snowflake.xml
pytest -n auto -m "snowflake" --reruns 3 --junitxml=test-results/junit-snowflake.xml

bigquery-test: guard-BIGQUERY_KEYFILE engine-bigquery-install
$(PIP) install -e ".[bigframes]"
pytest -n auto -m "bigquery" --retries 3 --junitxml=test-results/junit-bigquery.xml
pytest -n auto -m "bigquery" --reruns 3 --junitxml=test-results/junit-bigquery.xml

databricks-test: guard-DATABRICKS_CATALOG guard-DATABRICKS_SERVER_HOSTNAME guard-DATABRICKS_HTTP_PATH guard-DATABRICKS_ACCESS_TOKEN guard-DATABRICKS_CONNECT_VERSION engine-databricks-install
$(PIP) install 'databricks-connect==${DATABRICKS_CONNECT_VERSION}'
pytest -n auto -m "databricks" --retries 3 --junitxml=test-results/junit-databricks.xml
pytest -n auto -m "databricks" --reruns 3 --junitxml=test-results/junit-databricks.xml

redshift-test: guard-REDSHIFT_HOST guard-REDSHIFT_USER guard-REDSHIFT_PASSWORD guard-REDSHIFT_DATABASE engine-redshift-install
pytest -n auto -m "redshift" --retries 3 --junitxml=test-results/junit-redshift.xml
pytest -n auto -m "redshift" --reruns 3 --junitxml=test-results/junit-redshift.xml

clickhouse-cloud-test: guard-CLICKHOUSE_CLOUD_HOST guard-CLICKHOUSE_CLOUD_USERNAME guard-CLICKHOUSE_CLOUD_PASSWORD engine-clickhouse-install
pytest -n 1 -m "clickhouse_cloud" --retries 3 --junitxml=test-results/junit-clickhouse-cloud.xml
pytest -n 1 -m "clickhouse_cloud" --reruns 3 --junitxml=test-results/junit-clickhouse-cloud.xml

athena-test: guard-AWS_ACCESS_KEY_ID guard-AWS_SECRET_ACCESS_KEY guard-ATHENA_S3_WAREHOUSE_LOCATION engine-athena-install
pytest -n auto -m "athena" --retries 3 --junitxml=test-results/junit-athena.xml
pytest -n auto -m "athena" --reruns 3 --junitxml=test-results/junit-athena.xml

fabric-test: guard-FABRIC_HOST guard-FABRIC_CLIENT_ID guard-FABRIC_CLIENT_SECRET guard-FABRIC_DATABASE engine-fabric-install
pytest -n auto -m "fabric" --retries 3 --junitxml=test-results/junit-fabric.xml
pytest -n auto -m "fabric" --reruns 3 --junitxml=test-results/junit-fabric.xml

gcp-postgres-test: guard-GCP_POSTGRES_INSTANCE_CONNECTION_STRING guard-GCP_POSTGRES_USER guard-GCP_POSTGRES_PASSWORD guard-GCP_POSTGRES_KEYFILE_JSON engine-gcppostgres-install
pytest -n auto -m "gcp_postgres" --retries 3 --junitxml=test-results/junit-gcp-postgres.xml
pytest -n auto -m "gcp_postgres" --reruns 3 --junitxml=test-results/junit-gcp-postgres.xml

vscode_settings:
mkdir -p .vscode
Expand Down
11 changes: 8 additions & 3 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ dev = [
"pytest",
"pytest-asyncio",
"pytest-mock",
"pytest-retry",
"pytest-rerunfailures",
"pytest-xdist",
"pytz",
"redshift_connector",
Expand Down Expand Up @@ -264,8 +264,13 @@ markers = [
"redshift: test for Redshift",
"snowflake: test for Snowflake",
"spark: test for Spark",
"pyspark: test for PySpark that need to run separately from the other spark tests",
"trino: test for Trino (all connectors)",
"risingwave: test for Risingwave"
"risingwave: test for Risingwave",

# Other
"set_default_connection",
"registry_isolation"
]
addopts = "-n 0 --dist=loadgroup"
asyncio_default_fixture_loop_scope = "session"
Expand All @@ -275,7 +280,7 @@ log_cli_level = "INFO"
filterwarnings = [
"ignore:The localize method is no longer necessary, as this time zone supports the fold attribute"
]
retry_delay = 10
reruns_delay = 10

[tool.ruff]
line-length = 100
Expand Down
2 changes: 2 additions & 0 deletions sqlmesh/core/test/context.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,8 @@ class TestExecutionContext(ExecutionContext):
models: All upstream models to use for expansion and mapping of physical locations.
"""

__test__ = False # prevent pytest trying to collect this as a test class

def __init__(
self,
engine_adapter: EngineAdapter,
Expand Down
4 changes: 4 additions & 0 deletions sqlmesh/dbt/test.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,10 @@ class TestConfig(GeneralConfig):
error_if: Conditional expression (default "!=0") to detect if error condition met (Not supported).
"""

__test__ = (
False # prevent pytest trying to collect this as a test class when it's imported in a test
)

# SQLMesh fields
path: Path = Path()
name: str
Expand Down
78 changes: 0 additions & 78 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -212,84 +212,6 @@ def pytest_collection_modifyitems(items, *args, **kwargs):
item.add_marker("fast")


@pytest.hookimpl(hookwrapper=True, tryfirst=True)
def pytest_runtest_makereport(item: pytest.Item, call: pytest.CallInfo):
# Purpose: suppress a known, spurious teardown failure from the tmp_path fixture.
# The tmp_path fixture frequently throws errors like:
# - KeyError: <_pytest.stash.StashKey object at 0x79ba385fe1a0>
# in its teardown. This causes pytest to mark the test as failed even though we have zero control over this behaviour.
# So we log/swallow that particular error here rather than raising it

# note: the hook always has to yield (hookwrapper protocol requirement)
outcome = yield

# we only care about tests that used the tmp_path fixture
if "tmp_path" not in getattr(item, "fixturenames", []):
return

result: pytest.TestReport = outcome.get_result()

# only the teardown-phase report can carry this fixture error; setup/call reports are left untouched
if result.when != "teardown":
return

# If we specifically failed with a StashKey error in teardown, mark the test as passed
if result.failed:
exception = call.excinfo
# match on repr() because the KeyError's key is an opaque StashKey object,
# so its identity can only be recognised from the textual representation
if (
exception
and isinstance(exception.value, KeyError)
and "_pytest.stash.StashKey" in repr(exception)
):
result.outcome = "passed"
# record what was ignored so the suppression is still visible in the report
item.add_report_section(
"teardown", "stderr", f"Ignored tmp_path teardown error: {exception}"
)


def pytest_configure(config: pytest.Config):
# Purpose: reorder the pytest_runtest_makereport hook implementations so our
# error-swallowing wrapper (above) runs before the pytest-retry plugin's wrapper.
#
# we need to adjust the hook order if pytest-retry is present because it:
# - also declares a `pytest_runtest_makereport` with `hookwrapper=True, tryfirst=True`
# - this supersedes our one because pytest always loads plugins first and they take precedence over user code
#
# but, we need our one to run first because it's capturing and ignoring certain errors that cause pytest-retry to fail
# and not retry. so we need to adjust the order the hooks are called which pytest does NOT make easy.
#
# we can't just unload the pytest-retry plugin, load our hook and reload the pytest-retry plugin either.
# this causes an error:
# > Hook 'pytest_set_excluded_exceptions' is already registered within namespace
# because unregister() apparently doesn't unregister plugins cleanly in such a way they can be re-registered
#
# so what we end up doing below is a small monkey-patch to adjust the call order of the hooks
pm = config.pluginmanager

# NOTE(review): pluggy private import — may break across pluggy versions; confirm on upgrade
from pluggy._hooks import HookCaller

hook_caller: HookCaller = pm.hook.pytest_runtest_makereport
hook_impls = hook_caller.get_hookimpls()

# find the index of our one (conftest hooks are registered under their file path)
our_makereport_idx = next(
(i for i, v in enumerate(hook_impls) if v.plugin_name.endswith("tests/conftest.py")), None
)

# find the index of the pytest-retry one
pytest_retry_makereport_idx = next(
(i for i, v in enumerate(hook_impls) if v.plugin_name == "pytest-retry"), None
)

# only reorder when both hooks exist AND ours currently sits after pytest-retry's
if (
pytest_retry_makereport_idx is not None
and our_makereport_idx is not None
and our_makereport_idx > pytest_retry_makereport_idx
):
our_makereport_hook = hook_impls.pop(our_makereport_idx)

# inject our one to run before the pytest-retry one
hook_impls.insert(pytest_retry_makereport_idx, our_makereport_hook)

# HookCaller doesn't have a setter method for this, so assign the private list directly.
hook_caller._hookimpls = hook_impls # type: ignore


# Ignore all local config files
@pytest.fixture(scope="session", autouse=True)
def ignore_local_config_files():
Expand Down
4 changes: 4 additions & 0 deletions tests/dbt/test_integration.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,8 @@


class TestType(str, Enum):
__test__ = False # prevent pytest trying to collect this as a test class

DBT_RUNTIME = "dbt_runtime"
DBT_ADAPTER = "dbt_adapter"
SQLMESH = "sqlmesh"
Expand All @@ -53,6 +55,8 @@ def is_sqlmesh_runtime(self) -> bool:


class TestStrategy(str, Enum):
__test__ = False # prevent pytest trying to collect this as a test class

CHECK = "check"
TIMESTAMP = "timestamp"

Expand Down
8 changes: 4 additions & 4 deletions tests/engines/spark/test_db_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,10 @@
from sqlmesh.engines.spark.db_api import errors
from sqlmesh.engines.spark.db_api import spark_session as spark_session_db

pytestmark = [
pytest.mark.slow,
pytest.mark.spark_pyspark,
]
# note: this is deliberately not marked with 'spark' so that it
# can run separately from the spark integration tests.
# running both at the same time mutates global state in the SparkSession, which breaks these tests
pytestmark = [pytest.mark.slow, pytest.mark.pyspark]


def test_spark_session_cursor(spark_session: SparkSession):
Expand Down
2 changes: 2 additions & 0 deletions tests/integrations/github/cicd/test_github_commands.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@
from unittest import TestCase, mock
from unittest.result import TestResult

TestResult.__test__ = False # prevent pytest trying to collect this as a test class

import pytest
from pytest_mock.plugin import MockerFixture

Expand Down