diff --git a/dbt2looker/generator.py b/dbt2looker/generator.py
index 9b95438..413d676 100644
--- a/dbt2looker/generator.py
+++ b/dbt2looker/generator.py
@@ -163,19 +163,41 @@
         'BYTE': 'number',
         'SHORT': 'number',
         'INTEGER': 'number',
+        'INT': 'number',
+        'TINYINT': 'number',
+        'SMALLINT': 'number',
+        'BIGINT': 'number',
         'LONG': 'number',
         'FLOAT': 'number',
         'DOUBLE': 'number',
+        'REAL': 'number',
         'DECIMAL': 'number',
+        'DEC': 'number',
+        'NUMERIC': 'number',
         'STRING': 'string',
         'VARCHAR': 'string',
         'CHAR': 'string',
+        'BINARY': 'string',
         'BOOLEAN': 'yesno',
         'TIMESTAMP': 'timestamp',
         'DATE': 'datetime',
+        # ARRAY not supported
+        # STRUCT not supported
+        # INTERVAL not supported
+        # MAP not supported
     }
 }
 
+# Databricks is built on the Spark connector and uses the same datatypes
+LOOKER_DTYPE_MAP['databricks'] = LOOKER_DTYPE_MAP['spark']
+
+
+spark_like_adapters = [
+    models.SupportedDbtAdapters.databricks.value,
+    models.SupportedDbtAdapters.spark.value
+]
+
+
 looker_date_time_types = ['datetime', 'timestamp']
 looker_date_types = ['date']
 looker_scalar_types = ['number', 'yesno', 'string']
@@ -196,7 +218,7 @@ def normalise_spark_types(column_type: str) -> str:
 
 
 def map_adapter_type_to_looker(adapter_type: models.SupportedDbtAdapters, column_type: str):
-    normalised_column_type = (normalise_spark_types(column_type) if adapter_type == models.SupportedDbtAdapters.spark.value else column_type).upper()
+    normalised_column_type = (normalise_spark_types(column_type) if adapter_type in spark_like_adapters else column_type).upper()
     looker_type = LOOKER_DTYPE_MAP[adapter_type].get(normalised_column_type)
     if (column_type is not None) and (looker_type is None):
         logging.warning(f'Column type {column_type} not supported for conversion from {adapter_type} to looker. No dimension will be created.')
diff --git a/dbt2looker/models.py b/dbt2looker/models.py
index 3430eaf..d2e5c4a 100644
--- a/dbt2looker/models.py
+++ b/dbt2looker/models.py
@@ -19,6 +19,7 @@ class SupportedDbtAdapters(str, Enum):
     redshift = 'redshift'
     snowflake = 'snowflake'
     spark = 'spark'
+    databricks = 'databricks'
 
 
 # Lookml types