From 29dde1c64ef87df710667de8a4515e67530a81ad Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tom=C3=A1s=20Far=C3=ADas=20Santana?=
Date: Fri, 26 Dec 2025 23:52:32 +0100
Subject: [PATCH 1/7] chore: Drop dbt 1.9 and add dbt 1.11

---
 .github/workflows/ci.yaml | 10 +++-------
 1 file changed, 3 insertions(+), 7 deletions(-)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index de5a63d..afe1d64 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -35,8 +35,8 @@ jobs:
           - '3.0.6'
           - '2.10.4'
         dbt-version:
-          - '1.10.13'
-          - '1.9.10'
+          - '1.11.2'
+          - '1.10.17'
         exclude:
           # Airflow added 3.13 support in >=3.1
           - airflow-version: '3.0.6'
            python-version: '3.13'
@@ -45,10 +45,6 @@
           - airflow-version: '2.10.4'
             python-version: '3.13'
 
-          # Dbt added 3.13 support in >=1.10
-          - dbt-version: '1.9.10'
-            python-version: '3.13'
-
     runs-on: ubuntu-latest
     steps:
       - name: Harden Runner
@@ -105,7 +101,7 @@
 
       - name: Static type checking with mypy
         # We only run mypy on the latest supported versions of Airflow & dbt,
-        if: matrix.python-version == '3.13' && matrix.airflow-version == '3.1.1' && matrix.dbt-version == '1.10.13'
+        if: matrix.python-version == '3.13' && matrix.airflow-version == '3.1.1' && matrix.dbt-version == '1.11.2'
         run: uv run mypy .
 
       - name: Code formatting with ruff

From 05160da1458af3b2b7281d24dd8d562eeed3c144 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tom=C3=A1s=20Far=C3=ADas=20Santana?=
Date: Sat, 27 Dec 2025 00:01:41 +0100
Subject: [PATCH 2/7] fix: Also run CI when updating CI

---
 .github/workflows/ci.yaml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index afe1d64..1c02ffb 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -7,6 +7,7 @@ on:
       - '**.py'
       - 'pyproject.toml'
       - 'uv.lock'
+      - 'ci.yaml'
       - '!airflow_dbt_python/__version__.py'
     tags:
       - "v*"
@@ -17,6 +18,7 @@ on:
       - '**.py'
       - 'pyproject.toml'
       - 'uv.lock'
+      - 'ci.yaml'
       - '!airflow_dbt_python/__version__.py'
 
 jobs:

From 20db3c5e78701a520cc36020978c160b4a55c50f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tom=C3=A1s=20Far=C3=ADas=20Santana?=
Date: Sat, 27 Dec 2025 00:05:30 +0100
Subject: [PATCH 3/7] chore: Bump airflow and remove 2.10 as it's no longer the
 latest MWAA version

---
 .github/workflows/ci.yaml | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 1c02ffb..944baea 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -33,9 +33,8 @@ jobs:
           - '3.11'
           - '3.10'
         airflow-version:
-          - '3.1.1'
+          - '3.1.5'
           - '3.0.6'
-          - '2.10.4'
         dbt-version:
           - '1.11.2'
           - '1.10.17'
         exclude:
           # Airflow added 3.13 support in >=3.1
           - airflow-version: '3.0.6'
             python-version: '3.13'
 
-          - airflow-version: '2.10.4'
-            python-version: '3.13'
-
     runs-on: ubuntu-latest
     steps:
       - name: Harden Runner

From 81cf1135a58642ac7aec83658b018832362b37e5 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tom=C3%A1s=20Far=C3=ADas=20Santana?=
Date: Sat, 27 Dec 2025 00:06:15 +0100
Subject: [PATCH 4/7] fix: Actually run CI on CI changes

---
 .github/workflows/ci.yaml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 944baea..b8f653a 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -7,7 +7,7 @@ on:
       - '**.py'
       - 'pyproject.toml'
       - 'uv.lock'
-      - 'ci.yaml'
+      - '**/ci.yaml'
       - '!airflow_dbt_python/__version__.py'
     tags:
       - "v*"
@@ -18,7 +18,7 @@ on:
       - '**.py'
       - 'pyproject.toml'
       - 'uv.lock'
-      - 'ci.yaml'
+      - '**/ci.yaml'
       - '!airflow_dbt_python/__version__.py'
 
 jobs:

From 6a0a9a5446f23dd306dce5cfa131830c60e5eea3 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tom=C3=A1s=20Far=C3=ADas=20Santana?=
Date: Sat, 27 Dec 2025 00:54:15 +0100
Subject: [PATCH 5/7] fix: Add missing flag

---
 airflow_dbt_python/utils/configs.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/airflow_dbt_python/utils/configs.py b/airflow_dbt_python/utils/configs.py
index 083f652..dc7a799 100644
--- a/airflow_dbt_python/utils/configs.py
+++ b/airflow_dbt_python/utils/configs.py
@@ -178,6 +178,7 @@ class BaseConfig:
 
     upgrade: bool = False
     require_model_names_without_spaces: bool = False
+    require_ref_searches_node_package_before_root: bool = False
     exclude_resource_types: list[str] = dataclasses.field(
         default_factory=list, repr=False
     )

From fed717ec27fed5b4daf91b6f0199340349b6f0a9 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tom=C3=A1s=20Far=C3=ADas=20Santana?=
Date: Sat, 27 Dec 2025 15:19:14 +0100
Subject: [PATCH 6/7] chore: Update documentation to clarify supported
 airflow/dbt versions

---
 README.md                | 22 +++++++++----------
 docs/development.rst     | 16 ++++++++------
 docs/example_dags.rst    |  2 +-
 docs/getting_started.rst | 47 ++++++++++++++++++++++++----------------
 4 files changed, 49 insertions(+), 38 deletions(-)

diff --git a/README.md b/README.md
index fe78c97..8693b95 100644
--- a/README.md
+++ b/README.md
@@ -6,7 +6,7 @@
 [![Test coverage](https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/tomasfarias/81ef37701aa088d18db8a58ce07c79c7/raw/covbadge.json)](https://github.com/tomasfarias/airflow-dbt-python/actions)
 [![Documentation](https://readthedocs.org/projects/airflow-dbt-python/badge/?version=latest)](https://airflow-dbt-python.readthedocs.io/en/latest/?badge=latest)
 
-A collection of [Airflow](https://airflow.apache.org/) operators, hooks, and utilities to execute [`dbt`](https://pypi.org/project/dbt-core/) commands.
+A collection of [*Airflow*](https://airflow.apache.org/) operators, hooks, and utilities to execute [*dbt*](https://pypi.org/project/dbt-core/) commands.
 
 Read the [documentation](https://airflow-dbt-python.readthedocs.io) for examples, installation instructions, and more details.
 
@@ -16,20 +16,20 @@
 ## Requirements
 
 Before using *airflow-dbt-python*, ensure you meet the following requirements:
 
 * A *dbt* project using [dbt-core](https://pypi.org/project/dbt-core/) version 1.8 or later.
-* An Airflow environment using version 2.8 or later.
+* An *Airflow* deployment using version 3.0 or later.
 
-  * If using any managed service, like AWS MWAA or GCP Cloud Composer 2/3, ensure your environment is created with a supported version of Airflow.
-  * If self-hosting, Airflow installation instructions can be found in their [official documentation](https://airflow.apache.org/docs/apache-airflow/stable/installation/index.html).
+  * If using any managed *Airflow* service, like [AWS MWAA](https://aws.amazon.com/managed-workflows-for-apache-airflow/) or [GCP Cloud Composer](https://cloud.google.com/composer), ensure your environment is created with a supported version of *Airflow*.
+  * If self-hosting, *Airflow* installation instructions can be found in their [official documentation](https://airflow.apache.org/docs/apache-airflow/stable/installation/index.html).
 
-* Running Python 3.9 or later in your Airflow environment.
+* Python 3.10 or later.
 
 > **Warning**
 >
-> Even though we don't impose any upper limits on versions of Airflow and *dbt*, it's possible that new versions are not supported immediately after release, particularly for *dbt*. We recommend testing the latest versions before upgrading and [reporting any issues](https://github.com/tomasfarias/airflow-dbt-python/issues/new/choose).
+> New versions of *Airflow* and *dbt* may introduce breaking changes. We recommend testing any new versions of *Airflow* and *dbt* before upgrading production systems. Please [report any issues](https://github.com/tomasfarias/airflow-dbt-python/issues/new/choose) that may arise during testing so they can be addressed.
 
 > **Note**
 >
-> Older versions of Airflow and *dbt* may work with *airflow-dbt-python*, although we cannot guarantee this. Our testing pipeline runs the latest *dbt-core* with the latest Airflow release, and the latest version supported by [AWS MWAA](https://aws.amazon.com/managed-workflows-for-apache-airflow/) and [GCP Cloud Composer 2/3](https://aws.amazon.com/managed-workflows-for-apache-airflow/).
+> We only test *airflow-dbt-python* against a limited set of versions of *Airflow* and *dbt*, and try to keep up with the latest releases. For *Airflow*, our policy is to cover with tests the latest release of *Airflow*, the latest version available in [GCP Cloud Composer](https://docs.cloud.google.com/composer/docs/composer-versions), and the latest version available in [AWS MWAA](https://docs.aws.amazon.com/mwaa/latest/userguide/airflow-versions). For *dbt*, our policy is to cover the last two minor versions.
 
 ## From PyPI
 
 ``` shell
 pip install airflow-dbt-python
 ```
 
 As a convenience, some *dbt* adapters can be installed by specifying extras. For example:
 
 ``` shell
 pip install airflow-dbt-python[redshift]
 ```
 
-## From this repo
+## Building from source
 
 *airflow-dbt-python* can also be built from source by cloning this GitHub repository:
 
 ``` shell
 git clone https://github.com/tomasfarias/airflow-dbt-python.git
 cd airflow-dbt-python
 ```
 
-And installing with *uv*:
+And building with *uv*:
 
 ``` shell
-uv pip install .
+uv build
 ```
 
 ## In AWS MWAA
 
 Add *airflow-dbt-python* to your `requirements.txt` file.
 
 Read the [documentation](https://airflow-dbt-python.readthedocs.io/en/latest/getting_started.html) for a more detailed AWS MWAA installation breakdown.
 
 ## In GCP Cloud Composer
 
 Add *airflow-dbt-python* to your PyPI packages list.
 
-Read the [documentation](https://cloud.google.com/composer/docs/composer-2/install-python-dependencies#install-pypi) for a more detailed GCP Cloud Composer 2 installation breakdown.
+Refer to the [GCP Cloud Composer documentation](https://cloud.google.com/composer/docs/composer-3/install-python-dependencies#install-pypi) on how to do this.
 
 ## In other managed services
 
 *airflow-dbt-python* should be compatible with most or all *Airflow* managed services. Consult the documentation specific to your provider.
 
diff --git a/docs/development.rst b/docs/development.rst
index ced6e86..82d3b8f 100644
--- a/docs/development.rst
+++ b/docs/development.rst
@@ -41,16 +41,18 @@ The additional extras install dependencies required for testing. If testing a sp
 Support for different versions of *Airflow*
 -------------------------------------------
 
-*airflow-dbt-python* supports and is tested with multiple versions of Airflow; as a general rule, besides the latest version of Airflow, we test *airflow-dbt-python* against the latest version available in `AWS MWAA <https://aws.amazon.com/managed-workflows-for-apache-airflow/>`_, which usually lags behind a few minor versions. We are open to patches to improve backwards compatibility as long as they don't increase maintenance load significantly.
+*airflow-dbt-python* is tested against multiple versions of *Airflow*; as a general rule, besides the latest version of Airflow, we test *airflow-dbt-python* against the latest version available in `AWS MWAA <https://aws.amazon.com/managed-workflows-for-apache-airflow/>`_, which usually lags behind a few minor versions.
+
+We are open to patches to improve backwards compatibility as long as they don't increase maintenance load significantly.
+
+If you wish to install a different version of *Airflow* for testing you may skip the *airflow-providers* extras of the previous section and use *pip* instead to install any versions of *apache-airflow* and required providers.
 
 Modifying dependencies
 ----------------------
 
-Apache Airflow is a package of significant size that requires a lot of dependencies. Together with *dbt-core*, it's common to find dependency conflicts all over the place. Ultimately, we allow users to figure these issues out themselves, as most of the dependency conflicts are harmless: We do not interact with the *dbt* CLI, so any conflicts with CLI-specific packages can be safely ignored, but these requirements are not optional for *dbt-core*.
+*Apache Airflow* is a package of significant size that requires a lot of dependencies. Together with *dbt-core*, it's common to find dependency conflicts all over the place. Ultimately, we allow users to figure these issues out themselves, as most of the dependency conflicts are harmless: We do not interact with the *dbt* CLI, so any conflicts with CLI-specific packages can be safely ignored, but these requirements are not optional for *dbt-core*.
 
-All being said, this presents a problem when we try to add dependencies or modify existing ones. Grabbing a constraints file from `Airflow `_ and adding it as an optional group in ``pyproject.toml`` can be a useful strategy.
+All being said, this presents a problem when we try to add dependencies or modify existing ones. Grabbing a constraints file from `*Airflow* `_ and adding it as an optional group in ``pyproject.toml`` can be a useful strategy.
 
 Pre-commit hooks
 ----------------
@@ -86,12 +88,12 @@ Tests are available for all operators, hooks, and utilities. That being said, on
 
 .. note:: Unit tests (and *airflow-dbt-python*) assume *dbt* works correctly and do not assert the behavior of the *dbt* commands in depth.
 
-Testing specific requirements
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Requirements
+^^^^^^^^^^^^
 
 Unit tests interact with a `PostgreSQL <https://www.postgresql.org/>`_ database as a target to run dbt commands. This requires *PostgreSQL* to be installed in your local environment. Installation instructions for all major platforms can be found `here <https://www.postgresql.org/download/>`_.
 
-An Airflow database needs to be initialized in your local environment. This requires choosing a location for it, via the ``AIRFLOW_HOME`` environment variable. The same directory where *airflow-dbt-python* was cloned to can be used for this:
+An *Airflow* database needs to be initialized in your local environment. This requires choosing a location for it, via the ``AIRFLOW_HOME`` environment variable. The same directory where *airflow-dbt-python* was cloned to can be used for this:
 
 .. code-block:: shell
diff --git a/docs/example_dags.rst b/docs/example_dags.rst
index e8e4619..1357194 100644
--- a/docs/example_dags.rst
+++ b/docs/example_dags.rst
@@ -4,7 +4,7 @@ Example DAGs
 This section contains a few DAGs showing off some dbt pipelines to get you going.
 
 .. warning::
-   All example DAGs are tested against against ``apache-airflow==2.2.5``. Some changes, like modifying ``import`` statements or changing types, may be required for them to work in environments running other versions of Airflow.
+   All example DAGs are tested against a subset of *Airflow* versions. Some changes, like modifying ``import`` statements or changing types, may be required for them to work in environments running other versions of *Airflow*.
 
 Basic DAG
 ^^^^^^^^^

diff --git a/docs/getting_started.rst b/docs/getting_started.rst
index ee1d44d..401aa6c 100644
--- a/docs/getting_started.rst
+++ b/docs/getting_started.rst
@@ -10,23 +10,23 @@ Requirements
 Before using *airflow-dbt-python*, ensure you meet the following requirements:
 
 * A *dbt* project using `dbt-core <https://pypi.org/project/dbt-core/>`_ version 1.8 or later.
-* An Airflow environment using version 2.8 or later.
+* An *Airflow* environment using version 3.0 or later.
 
-  * If using any managed service, like AWS MWAA or GCP Cloud Composer 2/3, ensure your environment is created with a supported version of Airflow.
-  * If self-hosting, Airflow installation instructions can be found in their `official documentation <https://airflow.apache.org/docs/apache-airflow/stable/installation/index.html>`_.
+  * If using any managed *Airflow* service, like `AWS MWAA <https://aws.amazon.com/managed-workflows-for-apache-airflow/>`_ or `GCP Cloud Composer <https://cloud.google.com/composer>`_, ensure your environment is created with a supported version of *Airflow*.
+  * If self-hosting, *Airflow* installation instructions can be found in their `official documentation <https://airflow.apache.org/docs/apache-airflow/stable/installation/index.html>`_.
 
-* Running Python 3.9 or later in your Airflow environment.
+* Python 3.10 or later.
 
 .. warning::
-   Even though we don't impose any upper limits on versions of Airflow and *dbt*, it's possible that new versions are not supported immediately after release, particularly for *dbt*. We recommend testing the latest versions before upgrading and `reporting any issues <https://github.com/tomasfarias/airflow-dbt-python/issues/new/choose>`_.
+   New versions of *Airflow* and *dbt* may introduce breaking changes. We recommend testing any new versions of *Airflow* and *dbt* before upgrading production systems. Please `report any issues <https://github.com/tomasfarias/airflow-dbt-python/issues/new/choose>`_ that may arise during testing so they can be addressed.
 
 .. note::
-   Older versions of Airflow and *dbt* may work with *airflow-dbt-python*, although we cannot guarantee this. Our testing pipeline runs the latest *dbt-core* with the latest Airflow release, and the latest version supported by `AWS MWAA <https://aws.amazon.com/managed-workflows-for-apache-airflow/>`_ and `GCP Cloud Composer 2/3 <https://aws.amazon.com/managed-workflows-for-apache-airflow/>`_.
+   We only test *airflow-dbt-python* against a limited set of versions of *Airflow* and *dbt*, and try to keep up with the latest releases. For *Airflow*, our policy is to cover with tests the latest release of *Airflow*, the latest version available in `GCP Cloud Composer <https://docs.cloud.google.com/composer/docs/composer-versions>`_, and the latest version available in `AWS MWAA <https://docs.aws.amazon.com/mwaa/latest/userguide/airflow-versions>`_. For *dbt*, our policy is to cover the last two minor versions.
 
 Installation
 ------------
 
-Your installation will vary according to your specific Airflow environment setup. These instructions cover a general approach by installing from PyPI or the GitHub repository, and how to install it in AWS MWAA. Other serviced offerings may require different steps, check the documentation of your managed service.
+Your installation will vary according to your specific Airflow environment setup. These instructions cover installing from PyPI, building from the GitHub repository directly, installing in AWS MWAA, and installing in GCP Cloud Composer. Other service offerings may require different steps; check the documentation of your managed service.
 
 From PyPI
 ^^^^^^^^^
 
@@ -62,24 +62,18 @@ Building from source
 
    git clone https://github.com/tomasfarias/airflow-dbt-python.git
    cd airflow-dbt-python
 
-And installing with *pip*:
+And building with *uv*:
 
 .. code-block:: shell
 
-   pip install .
+   uv build
 
-Optionally, any *dbt* adapters can be installed by specifying extras:
+In AWS MWAA
+^^^^^^^^^^^
 
-.. code-block:: shell
-
-   pip install .[postgres, redshift, bigquery, snowflake]
-
-Installing in MWAA
-^^^^^^^^^^^^^^^^^^
+Add *airflow-dbt-python* to your ``requirements.txt`` file and edit your *Airflow* environment to use this new ``requirements.txt`` file, or upload it as a plugin.
 
-*airflow-dbt-python* can be installed in an Airflow environment managed by AWS via their `Managed Workflows for Apache Airflow <https://aws.amazon.com/managed-workflows-for-apache-airflow/>`_ service.
-
-To do so, include *airflow-dbt-python* in the *requirements.txt* file provided to MWAA, for example:
+To do so, include *airflow-dbt-python* in the ``requirements.txt`` file provided to AWS MWAA, for example:
 
 .. code-block:: shell
    :caption: requirements.txt
 
@@ -110,6 +104,21 @@ The wheel file can now be added to your *plugins.zip*, and the requirements can
 
    /usr/local/airflow/plugins/airflow_dbt_python-X.Y.Z-py3-none-any.whl
 
+In GCP Cloud Composer
+^^^^^^^^^^^^^^^^^^^^^
+
+Add *airflow-dbt-python* to your PyPI packages list.
+
+Refer to the `GCP Cloud Composer documentation <https://cloud.google.com/composer/docs/composer-3/install-python-dependencies#install-pypi>`_ on how to do this.
+
+In other managed services
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+*airflow-dbt-python* should be compatible with most or all *Airflow* managed services. Consult the documentation specific to your provider.
+
+If you notice an issue when installing *airflow-dbt-python* in a specific managed service, please open an `issue <https://github.com/tomasfarias/airflow-dbt-python/issues/new/choose>`_.
+
+
 Accessing a *dbt* project
 -------------------------

From 2adc9a3533fff57b7657276db8d6e7a973b6142f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tom=C3=A1s=20Far=C3=ADas=20Santana?=
Date: Sat, 27 Dec 2025 15:26:04 +0100
Subject: [PATCH 7/7] chore: Also clarify in development docs

---
 docs/development.rst | 13 +++++++++----
 1 file changed, 9 insertions(+), 4 deletions(-)

diff --git a/docs/development.rst b/docs/development.rst
index 82d3b8f..65a83ff 100644
--- a/docs/development.rst
+++ b/docs/development.rst
@@ -38,15 +38,20 @@ The additional extras install dependencies required for testing. If testing a sp
 
    uv pip install -e .
 
-Support for different versions of *Airflow*
--------------------------------------------
+Supported *Airflow* versions
+----------------------------
 
-*airflow-dbt-python* is tested against multiple versions of *Airflow*; as a general rule, besides the latest version of Airflow, we test *airflow-dbt-python* against the latest version available in `AWS MWAA <https://aws.amazon.com/managed-workflows-for-apache-airflow/>`_, which usually lags behind a few minor versions.
+*airflow-dbt-python* is tested against multiple versions of *Airflow*; as a policy, besides the latest version of Airflow, we test *airflow-dbt-python* against the latest version available in `AWS MWAA <https://docs.aws.amazon.com/mwaa/latest/userguide/airflow-versions>`_, and the latest version available in `GCP Cloud Composer <https://docs.cloud.google.com/composer/docs/composer-versions>`_.
 
 We are open to patches to improve backwards compatibility as long as they don't increase maintenance load significantly.
 
 If you wish to install a different version of *Airflow* for testing you may skip the *airflow-providers* extras of the previous section and use *pip* instead to install any versions of *apache-airflow* and required providers.
 
+Supported *dbt* versions
+------------------------
+
+*airflow-dbt-python* is tested against multiple versions of *dbt*; as a policy, we cover the latest two minor versions with tests.
+
 Modifying dependencies
 ----------------------
 
@@ -91,7 +96,7 @@ Tests are available for all operators, hooks, and utilities. That being said, on
 Requirements
 ^^^^^^^^^^^^
 
-Unit tests interact with a `PostgreSQL <https://www.postgresql.org/>`_ database as a target to run dbt commands. This requires *PostgreSQL* to be installed in your local environment. Installation instructions for all major platforms can be found `here <https://www.postgresql.org/download/>`_.
+Unit tests interact with a `PostgreSQL <https://www.postgresql.org/>`_ database as a target to run *dbt* commands. This requires *PostgreSQL* to be installed in your local environment. Installation instructions for all major platforms can be found `here <https://www.postgresql.org/download/>`_.
 
 An *Airflow* database needs to be initialized in your local environment. This requires choosing a location for it, via the ``AIRFLOW_HOME`` environment variable. The same directory where *airflow-dbt-python* was cloned to can be used for this:
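The final hunk above stops just before the shell example that ``development.rst`` shows at this point. A minimal sketch of what that initialization can look like, assuming a POSIX shell and the ``airflow`` CLI available in the project's virtual environment (these exact commands are an assumption, not text recovered from the patch):

.. code-block:: shell

   # Assumption: run from the directory where airflow-dbt-python was cloned.
   export AIRFLOW_HOME="$(pwd)"
   # Create or upgrade the local Airflow metadata database (SQLite by default).
   uv run airflow db migrate

``airflow db migrate`` both creates and upgrades the metadata database on Airflow 2.7 and later, which is why it is sketched here instead of the older ``airflow db init``.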