Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions .changes/unreleased/Dependencies-20260202-150038.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
kind: Dependencies
body: Remove dbt dependencies from Metricflow development packages. This will require an environment refresh across metricflow and dbt-metricflow.
time: 2026-02-02T15:00:38.874789-05:00
custom:
Author: tlento
Issue: "1967"
3 changes: 3 additions & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,9 @@ repos:
rev: v1.3.0
hooks:
- id: mypy
# dbt-metricflow requires dbt-core, which is not installed in the main metricflow dev environment.
# We typecheck dbt-metricflow separately via direct invocation inside the `make lint` command.
exclude: "^dbt-metricflow/"
args: [--show-error-codes]
verbose: true
# "system" means that the current environment will be used to run the hook, not the environment installed by
Expand Down
2 changes: 1 addition & 1 deletion CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ When running any one of the hatch commands, the environment is automatically set
- Run the following commands in your shell, replacing the tags with the appropriate values:
- `export MF_SQL_ENGINE_URL=<YOUR_WAREHOUSE_CONNECTION_URL>`
- `export MF_SQL_ENGINE_PASSWORD=<YOUR_WAREHOUSE_PASSWORD>`
- Run `make test-<engine>` to execute the entire test suite against the target engine. This will also set the `MF_TEST_ADAPTER_TYPE` to the proper engine identifier and pull in and configure the necessary dbt adapter dependencies for query execution. For example, to run tests against BigQuery, run `make test-bigquery`
- Run `make test-<engine>` to execute the entire test suite against the target engine. This will pull in and configure the necessary dependencies for query execution. For example, to run tests against BigQuery, run `make test-bigquery`
- By default, without `MF_SQL_ENGINE_URL` and `MF_SQL_ENGINE_PASSWORD` set, your tests will run against DuckDB.
4. Run the linters with `make lint` at any time, but especially before submitting a PR. We use:
- `Black` for formatting
Expand Down
2 changes: 2 additions & 0 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -90,6 +90,8 @@ test-trino:
.PHONY: lint
lint:
hatch -v run dev-env:pre-commit run --verbose --all-files $(ADDITIONAL_PRECOMMIT_OPTIONS)
@echo "\n\nTypechecking dbt-metricflow separately due to dbt-core dependency...\n\n"
cd dbt-metricflow && hatch -v run dev-env:mypy --config-file ../mypy.ini dbt_metricflow

# Running data warehouses locally
.PHONY: postgresql postgres
Expand Down
24 changes: 7 additions & 17 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -106,18 +106,14 @@ features = ["dev-env-requirements"]


[tool.hatch.envs.dev-env.env-vars]
MF_TEST_ADAPTER_TYPE="duckdb"
MF_SQL_ENGINE_URL="duckdb://"
# This allows us to use the classes in the `dbt-metricflow` package for tests without installing the package.
# `dbt-metricflow` can't be installed as it has `metricflow` as a dependency.
PYTHONPATH="metricflow-semantics:dbt-metricflow"
PYTHONPATH="metricflow-semantics"


[tool.hatch.envs.postgres-env.env-vars]
PYTHONPATH="metricflow-semantics:dbt-metricflow"
PYTHONPATH="metricflow-semantics"
MF_SQL_ENGINE_URL="postgresql://metricflow@localhost:5432/metricflow"
MF_SQL_ENGINE_PASSWORD="metricflowing"
MF_TEST_ADAPTER_TYPE="postgres"

[tool.hatch.envs.postgres-env]
description = "Dev environment for working with PostgreSQL"
Expand All @@ -129,17 +125,15 @@ features = ["postgres-env-requirements"]
## configured independently of the hatch env construction

[tool.hatch.envs.bigquery-env.env-vars]
PYTHONPATH="metricflow-semantics:dbt-metricflow"
MF_TEST_ADAPTER_TYPE="bigquery"
PYTHONPATH="metricflow-semantics"

[tool.hatch.envs.bigquery-env]
description = "Dev environment for working with BigQuery"
template = "dev-env"
features = ["bigquery-env-requirements"]

[tool.hatch.envs.databricks-env.env-vars]
PYTHONPATH="metricflow-semantics:dbt-metricflow"
MF_TEST_ADAPTER_TYPE="databricks"
PYTHONPATH="metricflow-semantics"


[tool.hatch.envs.databricks-env]
Expand All @@ -149,19 +143,16 @@ features = ["databricks-env-requirements"]


[tool.hatch.envs.redshift-env.env-vars]
PYTHONPATH="metricflow-semantics:dbt-metricflow"
MF_TEST_ADAPTER_TYPE="redshift"
PYTHONPATH="metricflow-semantics"

[tool.hatch.envs.redshift-env]
description = "Dev environment for working with Redshift"
template = "dev-env"
features = ["redshift-env-requirements"]
extra-dependencies = ["dbt-redshift>=1.8.0, <1.9.0"]


[tool.hatch.envs.snowflake-env.env-vars]
PYTHONPATH="metricflow-semantics:dbt-metricflow"
MF_TEST_ADAPTER_TYPE="snowflake"
PYTHONPATH="metricflow-semantics"

[tool.hatch.envs.snowflake-env]
description = "Dev environment for working with Snowflake"
Expand All @@ -170,8 +161,7 @@ features = ["snowflake-env-requirements"]


[tool.hatch.envs.trino-env.env-vars]
PYTHONPATH="metricflow-semantics:dbt-metricflow"
MF_TEST_ADAPTER_TYPE = "trino"
PYTHONPATH="metricflow-semantics"
MF_SQL_ENGINE_URL = "trino://trino@localhost:8080/memory"
MF_SQL_ENGINE_PASSWORD=""

Expand Down
8 changes: 3 additions & 5 deletions requirements-files/dev-env-requirements.txt
Original file line number Diff line number Diff line change
@@ -1,10 +1,8 @@
# SqlAlchemy for new client implementation
# Test SqlClients depend on SqlAlchemy 2.x
sqlalchemy>=2.0.0, <2.1.0
# DuckDB engine for DuckDB-backed, SqlAlchemy-based client
duckdb-engine>=0.13.0, <0.14.0

# dbt dependencies (kept for parallel operation during transition)
dbt-core>=1.10.4, <1.11.0
# Excluding 1.2.1 due to window functions returning incorrect results:
# Excluding duckdb 1.2.1 due to window functions returning incorrect results:
# https://github.com/duckdb/duckdb/issues/16617
# Version 1.4.0 seems to have issues as well, so pinning to <1.4.0 until those are resolved.
duckdb !=1.2.1, <1.4.0
Expand Down
3 changes: 1 addition & 2 deletions requirements-files/requirements-databricks.txt
Original file line number Diff line number Diff line change
@@ -1,3 +1,2 @@
# SqlAlchemy Databricks connector
databricks-sql-connector>=3.0.0, <4.0.0
# SqlAlchemy Databricks connector, includes databricks-sql-connector
databricks-sqlalchemy>=2.0.0, <3.0.0
2 changes: 1 addition & 1 deletion requirements-files/requirements-postgres.txt
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
# SqlAlchemy PostgreSQL connector
# SqlAlchemy includes a dialect for PostgreSQL compatible with psycopg2
psycopg2-binary>=2.9.0, <3.0.0

This file was deleted.

This file was deleted.

This file was deleted.

This file was deleted.

Empty file.

This file was deleted.

This file was deleted.

This file was deleted.

This file was deleted.

Empty file.
Empty file.
Loading