diff --git a/.github/workflows/formatter.yml b/.github/workflows/formatter.yml index 52ee99193..384c34c37 100644 --- a/.github/workflows/formatter.yml +++ b/.github/workflows/formatter.yml @@ -10,13 +10,36 @@ jobs: name: runner / ruff runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - - name: Check files using the ruff formatter + - name: Checkout PR head + uses: actions/checkout@v3 + if: github.event_name == 'pull_request' + with: + repository: ${{ github.event.pull_request.head.repo.full_name }} + ref: ${{ github.event.pull_request.head.ref }} + + - name: Checkout Repo + uses: actions/checkout@v3 + if: github.event_name == 'workflow_dispatch' + + # This is used for forked PRs as write permissions are required to format files + - name: Run and commit changes with `ruff format .` locally on your forked branch to fix errors if they appear + if: ${{ github.event.pull_request.head.repo.fork == true }} + uses: chartboost/ruff-action@v1 + id: ruff_formatter_suggestions + with: + args: format --diff + + # This only runs if the PR is NOT from a forked repo + - name: Format files using ruff + if: ${{ github.event.pull_request.head.repo.fork == false }} uses: chartboost/ruff-action@v1 id: ruff_formatter with: args: format + + # This only runs if the PR is NOT from a forked repo - name: Auto commit ruff formatting + if: ${{ github.event.pull_request.head.repo.fork == false }} uses: stefanzweifel/git-auto-commit-action@v5 with: - commit_message: 'style fixes by ruff' \ No newline at end of file + commit_message: 'style fixes by ruff' diff --git a/.gitignore b/.gitignore index 644d7186d..e1f0a901c 100644 --- a/.gitignore +++ b/.gitignore @@ -149,4 +149,6 @@ benchmark_*.png .vscode # History -.history \ No newline at end of file +.history + +docker-compose-local.yml diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 03eaed229..95b1e7cf6 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -66,7 +66,7 @@ Make sure to update the appropriate `TEST_*_CONN_STRING`, so that it will be inc #### Run the tests -You can run the tests with `unittest`. +You can run the tests with `python -m unittest`. When running against multiple databases, the tests can take a long while. @@ -111,6 +111,15 @@ $ poetry install # Install dependencies $ docker-compose up -d mysql postgres # run mysql and postgres dbs in background ``` +If you want to change the configuration of docker-compose and run the DB containers, copy docker-compose.yml into docker-compose-local.yml, make changes and run +```shell-session +$ cp docker-compose.yml docker-compose-local.yml +$ docker-compose -f docker-compose-local.yml up -d mysql postgres # run mysql and postgres dbs in background + ``` +you will also have to set up `tests/local_settings.py` where `TEST_*_CONN_STRING` can be edited + +`docker-compose-local.yml` and `tests/local_settings.py` is git ignored so should not show up in git changes. + [docker-compose]: https://docs.docker.com/compose/install/ **3. Run Unit Tests** diff --git a/README.md b/README.md index 1a0f1ba99..4353f00a3 100644 --- a/README.md +++ b/README.md @@ -9,79 +9,73 @@ data-diff: Compare datasets fast, within or across SQL databases
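A note on the CONTRIBUTING.md hunk above: the new instructions mention creating `tests/local_settings.py` so that the `TEST_*_CONN_STRING` values can point at your local containers. A minimal sketch of what such a file might look like is below; the variable names, hosts, ports, and credentials are illustrative assumptions and should be matched to the defaults the test suite actually defines and to your `docker-compose-local.yml`.

```python
# tests/local_settings.py -- illustrative sketch; the names, hosts, ports and
# credentials below are assumptions, not values taken from the repository.
# The test suite reads these overrides instead of its built-in defaults.

TEST_MYSQL_CONN_STRING = "mysql://mysql:Password1@127.0.0.1:3306/mysql"
TEST_POSTGRESQL_CONN_STRING = "postgresql://postgres:Password1@127.0.0.1:5432/postgres"
```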
-> [Make sure to join us at our virtual hands-on lab series where our team walks through live how to get set-up with it!](https://www.datafold.com/virtual-hands-on-lab) +> [Join our live virtual lab series to learn how to set it up!](https://www.datafold.com/virtual-hands-on-lab) -# Use Cases - -## Data Migration & Replication Testing -Compare source to target and check for discrepancies when moving data between systems: -- Migrating to a new data warehouse (e.g., Oracle > Snowflake) -- Converting SQL to a new transformation framework (e.g., stored procedures > dbt) -- Continuously replicating data from an OLTP DB to OLAP DWH (e.g., MySQL > Redshift) - - -## Data Development Testing -Test SQL code and preview changes by comparing development/staging environment data to production: -1. Make a change to some SQL code -2. Run the SQL code to create a new dataset -3. Compare the dataset with its production version or another iteration +# What's a Data Diff? +A data diff is the value-level comparison between two tablesβ€”used to identify critical changes to your data and guarantee data quality. -

- dbt -

+There is a lot you can do with data-diff: you can test SQL code by comparing development or staging environment data to production, or compare source and target data to identify discrepancies when moving data between databases. -
- data-diff integrates with dbt Core to seamlessly compare local development to production datasets
+# data-diff OSS & Datafold Cloud
+data-diff is an open source utility for running stateless diffs, built to be a great single-player experience.
-
-![data-development-testing](docs/development_testing.png)
-
+Scale up with [Datafold Cloud](https://www.datafold.com/) to make data diffing a company-wide experience to both supercharge your data diffing CLI experience (ex: data-diff --dbt --cloud) and run diffs manually in your CI process and within the Datafold UI. This includes [column-level lineage](https://www.datafold.com/column-level-lineage) with BI tool integrations, [CI testing](https://docs.datafold.com/deployment_testing/how_it_works/), faster cross-database diffing, and diff history. -> [dbt Cloud users should check out Datafold's out-of-the-box deployment testing integration](https://www.datafold.com/data-deployment-testing) +# Use Cases -:eyes: **Watch [4-min demo video](https://www.loom.com/share/ad3df969ba6b4298939efb2fbcc14cde)** +### Data Development Testing +When developing SQL code, data-diff helps you validate and preview changes by comparing data between development/staging environments and production. Here's how it works: +1. Make a change to your SQL code +2. Run the SQL code to create a new dataset +3. Compare this dataset with its production version or other iterations -**[Get started with data-diff & dbt](https://docs.datafold.com/development_testing/open_source)** +### Data Migration & Replication Testing +data-diff is a powerful tool for comparing data when you're moving it between systems. Use it to ensure data accuracy and identify discrepancies during tasks like: +- **Migrating** to a new data warehouse (e.g., Oracle -> Snowflake) +- **Validating SQL transformations** from legacy solutions (e.g., stored procedures) to new transformation frameworks (e.g., dbt) +- Continuously **replicating data** from an OLTP database to OLAP data warehouse (e.g., MySQL -> Redshift) -Reach out on the dbt Slack in [#tools-datafold](https://getdbt.slack.com/archives/C03D25A92UU) for advice and support +# dbt Integration +

+ dbt +

+data-diff integrates with [dbt Core](https://github.com/dbt-labs/dbt-core) to seamlessly compare local development to production datasets. -# How it works +Learn more about how data-diff works with dbt: +* Read our docs to get started with [data-diff & dbt](https://docs.datafold.com/development_testing/cli) or :eyes: **watch the [4-min demo video](https://www.loom.com/share/ad3df969ba6b4298939efb2fbcc14cde)** +* dbt Cloud users should check out [Datafold's out-of-the-box deployment testing integration](https://www.datafold.com/data-deployment-testing) +* Get support from the dbt Community Slack in [#tools-datafold](https://getdbt.slack.com/archives/C03D25A92UU) -When comparing the data, `data-diff` utilizes the resources of the underlying databases as much as possible. It has two primary modes of comparison: -## `joindiff` -- Recommended for comparing data within the same database -- Uses the outer join operation to diff the rows as efficiently as possible within the same database -- Fully relies on the underlying database engine for computation -- Requires both datasets to be queryable with a single SQL query -- Time complexity approximates JOIN operation and is largely independent of the number of differences in the dataset +# Getting Started -## `hashdiff` -- Recommended for comparing datasets across different databases -- Can also be helpful in diffing very large tables with few expected differences within the same database -- Employs a divide-and-conquer algorithm based on hashing and binary search -- Can diff data across distinct database engines, e.g., PostgreSQL <> Snowflake -- Time complexity approximates COUNT(*) operation when there are few differences -- Performance degrades when datasets have a large number of differences +### ⚑ Validating dbt model changes between dev and prod +Looking to use data-diff in dbt development? -More information about the algorithm and performance considerations can be found [here](https://github.com/datafold/data-diff/blob/master/docs/technical-explanation.md) +Development testing with Datafold enables you to see the impact of dbt code changes on data as you write the code, whether in your IDE or CLI. -# Get started + Head over to [our `data-diff` + `dbt` documentation](https://docs.datafold.com/development_testing/cli) to get started with a development testing workflow! -## Validating dbt model changes between dev and prod -⚑ Looking to use `data-diff` in dbt development? Head over to [our `data-diff` + `dbt` documentation](https://docs.datafold.com/development_testing/how_it_works) to get started! +### πŸ”€ Compare data tables between databases +1. Install `data-diff` with adapters -## Compare data tables between databases -πŸ”€ To compare data between databases, install `data-diff` with specific database adapters, e.g.: +To compare data between databases, install `data-diff` with specific database adapters. For example, install it for PostgreSQL and Snowflake like this: ``` pip install data-diff 'data-diff[postgresql,snowflake]' -U ``` -Run `data-diff` with connection URIs. In the following example, we compare tables between PostgreSQL and Snowflake using the hashdiff algorithm: +Additionally, you can install all open source supported database adapters as follows. +``` +pip install data-diff 'data-diff[all-dbs]' -U +``` + +2. Run `data-diff` with connection URIs + +Then, we compare tables between PostgreSQL and Snowflake using the hashdiff algorithm: ```bash data-diff \ @@ -93,17 +87,17 @@ data-diff \ -c \ -w ``` +3. 
Set up your configuration -Run `data-diff` with a `toml` configuration file. In the following example, we compare tables between MotherDuck(hosted DuckDB) and Snowflake using the hashdiff algorithm: +You can use a `toml` configuration file to run your `data-diff` job. In this example, we compare tables between MotherDuck (hosted DuckDB) and Snowflake using the hashdiff algorithm: ```toml ## DATABASE CONNECTION ## -[database.duckdb_connection] +[database.duckdb_connection] driver = "duckdb" # filepath = "datafold_demo.duckdb" # local duckdb file example # filepath = "md:" # default motherduck connection example filepath = "md:datafold_demo?motherduck_token=${motherduck_token}" # API token recommended for motherduck connection - database = "datafold_demo" [database.snowflake_connection] driver = "snowflake" @@ -132,8 +126,12 @@ Run `data-diff` with a `toml` configuration file. In the following example, we c verbose = false ``` +4. Run your `data-diff` job + +Make sure to export relevant environment variables as needed. For example, we compare data based on the earlier configuration: ```bash + # export relevant environment variables, example below export motherduck_token= @@ -148,11 +146,13 @@ data-diff --conf datadiff.toml \ + 1, returned ``` -Check out [documentation](https://docs.datafold.com/reference/open_source/cli) for the full command reference. +5. Review the output +After running your data-diff job, review the output to identify and analyze differences in your data. -# Supported databases +Check out [documentation](https://docs.datafold.com/reference/open_source/cli) for the full command reference. +# Supported databases | Database | Status | Connection string | |---------------|-------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------| @@ -161,8 +161,9 @@ Check out [documentation](https://docs.datafold.com/reference/open_source/cli) f | Snowflake | 🟒 | `"snowflake://[:]@//?warehouse=&role=[&authenticator=externalbrowser]"` | | BigQuery | 🟒 | `bigquery:///` | | Redshift | 🟒 | `redshift://:@:5439/` | -| DuckDB | 🟒 | `duckdb://@` | -| MotherDuck | 🟒 | `duckdb://@` | +| DuckDB | 🟒 | `duckdb://` | +| MotherDuck | 🟒 | `duckdb://` | +| Microsoft SQL Server | 🟒 | `mssql://:@//` | | Oracle | 🟑 | `oracle://:@/servive_or_sid` | | Presto | 🟑 | `presto://:@:8080/` | | Databricks | 🟑 | `databricks://:@//` | @@ -172,8 +173,7 @@ Check out [documentation](https://docs.datafold.com/reference/open_source/cli) f | ElasticSearch | πŸ“ | | | Planetscale | πŸ“ | | | Pinot | πŸ“ | | -| Druid | πŸ“ | | -| Kafka | πŸ“ | | +| Druid | πŸ“ | | | | SQLite | πŸ“ | | * 🟒: Implemented and thoroughly tested. @@ -189,10 +189,43 @@ Your database not listed here?
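Beyond the CLI walkthrough above, the package can also be driven from Python. The sketch below assumes the public helpers `connect_to_table` and `diff_tables` exported by the `data_diff` package; treat the exact signatures and connection URIs as assumptions and check the docs for your installed version before relying on them.

```python
# Rough sketch of a programmatic diff; signatures are assumed and may vary by version.
from data_diff import connect_to_table, diff_tables

# "id" is assumed to be the key column here, as in the CLI examples above.
table1 = connect_to_table("postgresql://user:password@localhost:5432/db", "my_table", "id")
table2 = connect_to_table("snowflake://user:password@account/db/SCHEMA?warehouse=WH&role=ROLE", "my_table", "id")

for sign, row in diff_tables(table1, table2):
    # sign is "+" or "-" (row present only in one table or differing), row is a tuple of values
    print(sign, row)
```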
+# How it works
+
+`data-diff` efficiently compares data using two modes:
+
+**joindiff**: Ideal for comparing data within the same database, utilizing outer joins for efficient row comparisons. It relies on the database engine for computation and has consistent performance.
+
+**hashdiff**: Recommended for comparing datasets across different databases, or for large tables with only a few expected differences. It uses hashing and binary search, and can diff data across distinct database engines.
+
+
+Click here to learn more about joindiff and hashdiff
+
+### `joindiff`
+* Recommended for comparing data within the same database
+* Uses the outer join operation to diff the rows as efficiently as possible within the same database
+* Fully relies on the underlying database engine for computation
+* Requires both datasets to be queryable with a single SQL query
+* Time complexity approximates JOIN operation and is largely independent of the number of differences in the dataset
+
+### `hashdiff`
+* Recommended for comparing datasets across different databases
+* Can also be helpful in diffing very large tables with few expected differences within the same database
+* Employs a divide-and-conquer algorithm based on hashing and binary search
+* Can diff data across distinct database engines, e.g., PostgreSQL <> Snowflake
+* Time complexity approximates COUNT(*) operation when there are few differences
+* Performance degrades when datasets have a large number of differences
+
+
+
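To make the `hashdiff` description above concrete, here is a toy, self-contained illustration of the divide-and-conquer idea: checksum a key range on both sides, stop early when the checksums match, split and recurse when they do not, and only fetch rows once a segment is small. This is an explanatory model under simplified assumptions (integer keys, in-memory dicts), not the library's implementation, and every name in it is made up.

```python
import hashlib
from typing import Dict, Iterator, Tuple

Rows = Dict[int, str]  # toy model of a table: primary key -> row value

def segment_checksum(rows: Rows, lo: int, hi: int) -> str:
    # One cheap aggregate per segment: hash all rows whose key falls in [lo, hi).
    h = hashlib.md5()
    for key in sorted(k for k in rows if lo <= k < hi):
        h.update(f"{key}:{rows[key]}".encode())
    return h.hexdigest()

def toy_hashdiff(a: Rows, b: Rows, lo: int, hi: int,
                 factor: int = 4, threshold: int = 8) -> Iterator[Tuple[str, int]]:
    # Matching checksums: assume the whole segment is identical and stop early.
    if segment_checksum(a, lo, hi) == segment_checksum(b, lo, hi):
        return
    if hi - lo <= threshold:
        # Segment is small enough: compare the rows directly.
        for key in range(lo, hi):
            if a.get(key) != b.get(key):
                if key in a:
                    yield ("-", key)
                if key in b:
                    yield ("+", key)
        return
    # Otherwise split into `factor` sub-segments and recurse.
    step = max(1, (hi - lo) // factor)
    for start in range(lo, hi, step):
        yield from toy_hashdiff(a, b, start, min(start + step, hi), factor, threshold)

if __name__ == "__main__":
    left = {i: f"row-{i}" for i in range(100)}
    right = dict(left)
    right[42] = "row-42-changed"
    del right[7]
    print(list(toy_hashdiff(left, right, 0, 100)))  # -> [('-', 7), ('-', 42), ('+', 42)]
```

The same structure explains the performance notes above: when the tables mostly match, the work is dominated by per-segment checksums (roughly one aggregate query per segment), while a large number of differences forces many segments down to row-level comparison.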
+ +For detailed algorithm and performance insights, explore [here](https://github.com/datafold/data-diff/blob/master/docs/technical-explanation.md), or head to our docs to [learn more about how Datafold diffs data](https://docs.datafold.com/data_diff/how-datafold-diffs-data). + ## Contributors We thank everyone who contributed so far! +We'd love to see your face here: [Contributing Instructions](CONTRIBUTING.md) + diff --git a/data_diff/__main__.py b/data_diff/__main__.py index 06140ad1b..ffd9bfa79 100644 --- a/data_diff/__main__.py +++ b/data_diff/__main__.py @@ -1,34 +1,32 @@ -from copy import deepcopy -from datetime import datetime +import json +import logging import os import sys import time -import json -import logging +from copy import deepcopy +from datetime import datetime from itertools import islice -from typing import Dict, Optional, Tuple +from typing import Dict, Optional, Tuple, Union, List, Set +import click import rich from rich.logging import RichHandler -import click - -from data_diff import Database -from data_diff.schema import create_schema -from data_diff.queries.api import current_timestamp +from data_diff import Database, DbPath +from data_diff.config import apply_config_from_file +from data_diff.databases._connect import connect from data_diff.dbt import dbt_diff -from data_diff.utils import eval_name_template, remove_password_from_url, safezip, match_like, LogStatusHandler -from data_diff.diff_tables import Algorithm +from data_diff.diff_tables import Algorithm, TableDiffer from data_diff.hashdiff_tables import HashDiffer, DEFAULT_BISECTION_THRESHOLD, DEFAULT_BISECTION_FACTOR from data_diff.joindiff_tables import TABLE_WRITE_LIMIT, JoinDiffer -from data_diff.table_segment import TableSegment -from data_diff.databases._connect import connect from data_diff.parse_time import parse_time_before, UNITS_STR, ParseError -from data_diff.config import apply_config_from_file +from data_diff.queries.api import current_timestamp +from data_diff.schema import RawColumnInfo, create_schema +from data_diff.table_segment import TableSegment from data_diff.tracking import disable_tracking, set_entrypoint_name +from data_diff.utils import eval_name_template, remove_password_from_url, safezip, match_like, LogStatusHandler from data_diff.version import __version__ - COLOR_SCHEME = { "+": "green", "-": "red", @@ -72,12 +70,12 @@ def _remove_passwords_in_dict(d: dict) -> None: d[k] = remove_password_from_url(https://github.com/v) -def _get_schema(pair): +def _get_schema(pair: Tuple[Database, DbPath]) -> Dict[str, RawColumnInfo]: db, table_path = pair return db.query_table_schema(table_path) -def diff_schemas(table1, table2, schema1, schema2, columns): +def diff_schemas(table1, table2, schema1, schema2, columns) -> None: logging.info("Diffing schemas...") attrs = "name", "type", "datetime_precision", "numeric_precision", "numeric_scale" for c in columns: @@ -103,7 +101,7 @@ def diff_schemas(table1, table2, schema1, schema2, columns): class MyHelpFormatter(click.HelpFormatter): - def __init__(self, **kwargs): + def __init__(self, **kwargs) -> None: super().__init__(self, **kwargs) self.indent_increment = 6 @@ -281,7 +279,7 @@ def write_usage(self, prog: str, args: str = "", prefix: Optional[str] = None) - default=None, help="Override the dbt production schema configuration within dbt_project.yml", ) -def main(conf, run, **kw): +def main(conf, run, **kw) -> None: log_handlers = _get_log_handlers(kw["dbt"]) if kw["table2"] is None and kw["database2"]: # Use the "database table 
table" form @@ -341,14 +339,150 @@ def main(conf, run, **kw): production_schema_flag=kw["prod_schema"], ) else: - return _data_diff( - dbt_project_dir=project_dir_override, dbt_profiles_dir=profiles_dir_override, state=state, **kw - ) + _data_diff(dbt_project_dir=project_dir_override, dbt_profiles_dir=profiles_dir_override, state=state, **kw) except Exception as e: logging.error(e) raise +def _get_dbs( + threads: int, database1: str, threads1: int, database2: str, threads2: int, interactive: bool +) -> Tuple[Database, Database]: + db1 = connect(database1, threads1 or threads) + if database1 == database2: + db2 = db1 + else: + db2 = connect(database2, threads2 or threads) + + if interactive: + db1.enable_interactive() + db2.enable_interactive() + + return db1, db2 + + +def _set_age(options: dict, min_age: Optional[str], max_age: Optional[str], db: Database) -> None: + if min_age or max_age: + now: datetime = db.query(current_timestamp(), datetime).replace(tzinfo=None) + try: + if max_age: + options["min_update"] = parse_time_before(now, max_age) + if min_age: + options["max_update"] = parse_time_before(now, min_age) + except ParseError as e: + logging.error(f"Error while parsing age expression: {e}") + + +def _get_table_differ( + algorithm: str, + db1: Database, + db2: Database, + threaded: bool, + threads: int, + assume_unique_key: bool, + sample_exclusive_rows: bool, + materialize_all_rows: bool, + table_write_limit: int, + materialize_to_table: Optional[str], + bisection_factor: Optional[int], + bisection_threshold: Optional[int], +) -> TableDiffer: + algorithm = Algorithm(algorithm) + if algorithm == Algorithm.AUTO: + algorithm = Algorithm.JOINDIFF if db1 == db2 else Algorithm.HASHDIFF + + logging.info(f"Using algorithm '{algorithm.name.lower()}'.") + + if algorithm == Algorithm.JOINDIFF: + return JoinDiffer( + threaded=threaded, + max_threadpool_size=threads and threads * 2, + validate_unique_key=not assume_unique_key, + sample_exclusive_rows=sample_exclusive_rows, + materialize_all_rows=materialize_all_rows, + table_write_limit=table_write_limit, + materialize_to_table=( + materialize_to_table and db1.dialect.parse_table_name(eval_name_template(materialize_to_table)) + ), + ) + + assert algorithm == Algorithm.HASHDIFF + return HashDiffer( + bisection_factor=DEFAULT_BISECTION_FACTOR if bisection_factor is None else bisection_factor, + bisection_threshold=DEFAULT_BISECTION_THRESHOLD if bisection_threshold is None else bisection_threshold, + threaded=threaded, + max_threadpool_size=threads and threads * 2, + ) + + +def _print_result(stats, json_output, diff_iter) -> None: + if stats: + if json_output: + rich.print(json.dumps(diff_iter.get_stats_dict())) + else: + rich.print(diff_iter.get_stats_string()) + + else: + for op, values in diff_iter: + color = COLOR_SCHEME.get(op, "grey62") + + if json_output: + jsonl = json.dumps([op, list(values)]) + rich.print(f"[{color}]{jsonl}[/{color}]") + else: + text = f"{op} {', '.join(map(str, values))}" + rich.print(f"[{color}]{text}[/{color}]") + + sys.stdout.flush() + + +def _get_expanded_columns( + columns: List[str], + case_sensitive: bool, + mutual: Set[str], + db1: Database, + schema1: dict, + table1: str, + db2: Database, + schema2: dict, + table2: str, +) -> Set[str]: + expanded_columns: Set[str] = set() + for c in columns: + cc = c if case_sensitive else c.lower() + match = set(match_like(cc, mutual)) + if not match: + m1 = None if any(match_like(cc, schema1.keys())) else f"{db1}/{table1}" + m2 = None if any(match_like(cc, schema2.keys())) 
else f"{db2}/{table2}" + not_matched = ", ".join(m for m in [m1, m2] if m) + raise ValueError(f"Column '{c}' not found in: {not_matched}") + + expanded_columns |= match + return expanded_columns + + +def _get_threads(threads: Union[int, str, None], threads1: Optional[int], threads2: Optional[int]) -> Tuple[bool, int]: + threaded = True + if threads is None: + threads = 1 + elif isinstance(threads, str) and threads.lower() == "serial": + assert not (threads1 or threads2) + threaded = False + threads = 1 + else: + try: + threads = int(threads) + except ValueError: + logging.error("Error: threads must be a number, or 'serial'.") + raise + + if threads < 1: + logging.error("Error: threads must be >= 1") + raise ValueError("Error: threads must be >= 1") + + return threaded, threads + + def _data_diff( database1, table1, @@ -389,32 +523,13 @@ def _data_diff( threads1=None, threads2=None, __conf__=None, -): +) -> None: if limit and stats: logging.error("Cannot specify a limit when using the -s/--stats switch") return key_columns = key_columns or ("id",) - bisection_factor = DEFAULT_BISECTION_FACTOR if bisection_factor is None else int(bisection_factor) - bisection_threshold = DEFAULT_BISECTION_THRESHOLD if bisection_threshold is None else int(bisection_threshold) - - threaded = True - if threads is None: - threads = 1 - elif isinstance(threads, str) and threads.lower() == "serial": - assert not (threads1 or threads2) - threaded = False - threads = 1 - else: - try: - threads = int(threads) - except ValueError: - logging.error("Error: threads must be a number, or 'serial'.") - return - if threads < 1: - logging.error("Error: threads must be >= 1") - return - + threaded, threads = _get_threads(threads, threads1, threads2) start = time.monotonic() if database1 is None or database2 is None: @@ -423,133 +538,79 @@ def _data_diff( ) return - db1 = connect(database1, threads1 or threads) - if database1 == database2: - db2 = db1 - else: - db2 = connect(database2, threads2 or threads) - - options = dict( - case_sensitive=case_sensitive, - where=where, - ) - - if min_age or max_age: - now: datetime = db1.query(current_timestamp(), datetime) - now = now.replace(tzinfo=None) - try: - if max_age: - options["min_update"] = parse_time_before(now, max_age) - if min_age: - options["max_update"] = parse_time_before(now, min_age) - except ParseError as e: - logging.error(f"Error while parsing age expression: {e}") - return - - dbs: Tuple[Database, Database] = db1, db2 - - if interactive: - for db in dbs: - db.enable_interactive() - - algorithm = Algorithm(algorithm) - if algorithm == Algorithm.AUTO: - algorithm = Algorithm.JOINDIFF if db1 == db2 else Algorithm.HASHDIFF - - if algorithm == Algorithm.JOINDIFF: - differ = JoinDiffer( - threaded=threaded, - max_threadpool_size=threads and threads * 2, - validate_unique_key=not assume_unique_key, - sample_exclusive_rows=sample_exclusive_rows, - materialize_all_rows=materialize_all_rows, - table_write_limit=table_write_limit, - materialize_to_table=materialize_to_table - and db1.dialect.parse_table_name(eval_name_template(materialize_to_table)), - ) - else: - assert algorithm == Algorithm.HASHDIFF - differ = HashDiffer( - bisection_factor=bisection_factor, - bisection_threshold=bisection_threshold, - threaded=threaded, - max_threadpool_size=threads and threads * 2, + db1: Database + db2: Database + db1, db2 = _get_dbs(threads, database1, threads1, database2, threads2, interactive) + with db1, db2: + options = { + "case_sensitive": case_sensitive, + "where": where, + } + + 
_set_age(options, min_age, max_age, db1) + dbs: Tuple[Database, Database] = db1, db2 + + differ = _get_table_differ( + algorithm, + db1, + db2, + threaded, + threads, + assume_unique_key, + sample_exclusive_rows, + materialize_all_rows, + table_write_limit, + materialize_to_table, + bisection_factor, + bisection_threshold, ) - table_names = table1, table2 - table_paths = [db.dialect.parse_table_name(t) for db, t in safezip(dbs, table_names)] + table_names = table1, table2 + table_paths = [db.dialect.parse_table_name(t) for db, t in safezip(dbs, table_names)] - schemas = list(differ._thread_map(_get_schema, safezip(dbs, table_paths))) - schema1, schema2 = schemas = [ - create_schema(db.name, table_path, schema, case_sensitive) - for db, table_path, schema in safezip(dbs, table_paths, schemas) - ] + schemas = list(differ._thread_map(_get_schema, safezip(dbs, table_paths))) + schema1, schema2 = schemas = [ + create_schema(db.name, table_path, schema, case_sensitive) + for db, table_path, schema in safezip(dbs, table_paths, schemas) + ] - mutual = schema1.keys() & schema2.keys() # Case-aware, according to case_sensitive - logging.debug(f"Available mutual columns: {mutual}") - - expanded_columns = set() - for c in columns: - cc = c if case_sensitive else c.lower() - match = set(match_like(cc, mutual)) - if not match: - m1 = None if any(match_like(cc, schema1.keys())) else f"{db1}/{table1}" - m2 = None if any(match_like(cc, schema2.keys())) else f"{db2}/{table2}" - not_matched = ", ".join(m for m in [m1, m2] if m) - raise ValueError(f"Column '{c}' not found in: {not_matched}") + mutual = schema1.keys() & schema2.keys() # Case-aware, according to case_sensitive + logging.debug(f"Available mutual columns: {mutual}") - expanded_columns |= match - - columns = tuple(expanded_columns - {*key_columns, update_column}) - - if db1 == db2: - diff_schemas( - table_names[0], - table_names[1], - schema1, - schema2, - ( - *key_columns, - update_column, - *columns, - ), + expanded_columns = _get_expanded_columns( + columns, case_sensitive, mutual, db1, schema1, table1, db2, schema2, table2 ) + columns = tuple(expanded_columns - {*key_columns, update_column}) + + if db1 == db2: + diff_schemas( + table_names[0], + table_names[1], + schema1, + schema2, + ( + *key_columns, + update_column, + *columns, + ), + ) - logging.info(f"Diffing using columns: key={key_columns} update={update_column} extra={columns}.") - logging.info(f"Using algorithm '{algorithm.name.lower()}'.") - - segments = [ - TableSegment(db, table_path, key_columns, update_column, columns, **options)._with_raw_schema(raw_schema) - for db, table_path, raw_schema in safezip(dbs, table_paths, schemas) - ] - - diff_iter = differ.diff_tables(*segments) - - if limit: - assert not stats - diff_iter = islice(diff_iter, int(limit)) + logging.info(f"Diffing using columns: key={key_columns} update={update_column} extra={columns}.") - if stats: - if json_output: - rich.print(json.dumps(diff_iter.get_stats_dict())) - else: - rich.print(diff_iter.get_stats_string()) + segments = [ + TableSegment(db, table_path, key_columns, update_column, columns, **options)._with_raw_schema(raw_schema) + for db, table_path, raw_schema in safezip(dbs, table_paths, schemas) + ] - else: - for op, values in diff_iter: - color = COLOR_SCHEME.get(op, "grey62") + diff_iter = differ.diff_tables(*segments) - if json_output: - jsonl = json.dumps([op, list(values)]) - rich.print(f"[{color}]{jsonl}[/{color}]") - else: - text = f"{op} {', '.join(map(str, values))}" - 
rich.print(f"[{color}]{text}[/{color}]") + if limit: + assert not stats + diff_iter = islice(diff_iter, int(limit)) - sys.stdout.flush() + _print_result(stats, json_output, diff_iter) end = time.monotonic() - logging.info(f"Duration: {end-start:.2f} seconds.") diff --git a/data_diff/abcs/database_types.py b/data_diff/abcs/database_types.py index f3c6381a0..2860d2149 100644 --- a/data_diff/abcs/database_types.py +++ b/data_diff/abcs/database_types.py @@ -1,6 +1,6 @@ import decimal from abc import ABC, abstractmethod -from typing import List, Optional, Tuple, Type, TypeVar, Union +from typing import Collection, List, Optional, Tuple, Type, TypeVar, Union from datetime import datetime import attrs @@ -15,6 +15,91 @@ N = TypeVar("N") +@attrs.frozen(kw_only=True, eq=False, order=False, unsafe_hash=True) +class Collation: + """ + A pre-parsed or pre-known record about db collation, per column. + + The "greater" collation should be used as a target collation for textual PKs + on both sides of the diff β€” by coverting the "lesser" collation to self. + + Snowflake easily absorbs the performance losses, so it has a boost to always + be greater than any other collation in non-Snowflake databases. + Other databases need to negotiate which side absorbs the performance impact. + """ + + # A boost for special databases that are known to absorb the performance dmaage well. + absorbs_damage: bool = False + + # Ordinal soring by ASCII/UTF8 (True), or alphabetic as per locale/country/etc (False). + ordinal: Optional[bool] = None + + # Lowercase first (aAbBcC or abcABC). Otherwise, uppercase first (AaBbCc or ABCabc). + lower_first: Optional[bool] = None + + # 2-letter lower-case locale and upper-case country codes, e.g. en_US. Ignored for ordinals. + language: Optional[str] = None + country: Optional[str] = None + + # There are also space-, punctuation-, width-, kana-(in)sensitivity, so on. + # Ignore everything not related to xdb alignment. Only case- & accent-sensitivity are common. + case_sensitive: Optional[bool] = None + accent_sensitive: Optional[bool] = None + + # Purely informational, for debugging: + _source: Union[None, str, Collection[str]] = None + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Collation): + return NotImplemented + if self.ordinal and other.ordinal: + # TODO: does it depend on language? what does Albanic_BIN mean in MS SQL? + return True + return ( + self.language == other.language + and (self.country is None or other.country is None or self.country == other.country) + and self.case_sensitive == other.case_sensitive + and self.accent_sensitive == other.accent_sensitive + and self.lower_first == other.lower_first + ) + + def __ne__(self, other: object) -> bool: + if not isinstance(other, Collation): + return NotImplemented + return not self.__eq__(other) + + def __gt__(self, other: object) -> bool: + if not isinstance(other, Collation): + return NotImplemented + if self == other: + return False + if self.absorbs_damage and not other.absorbs_damage: + return False + if other.absorbs_damage and not self.absorbs_damage: + return True # this one is preferred if it cannot absorb damage as its counterpart can + if self.ordinal and not other.ordinal: + return True + if other.ordinal and not self.ordinal: + return False + # TODO: try to align the languages & countries? 
+ return False + + def __ge__(self, other: object) -> bool: + if not isinstance(other, Collation): + return NotImplemented + return self == other or self.__gt__(other) + + def __lt__(self, other: object) -> bool: + if not isinstance(other, Collation): + return NotImplemented + return self != other and not self.__gt__(other) + + def __le__(self, other: object) -> bool: + if not isinstance(other, Collation): + return NotImplemented + return self == other or not self.__gt__(other) + + @attrs.define(frozen=True, kw_only=True) class ColType: # Arbitrary metadata added and fetched at runtime. @@ -71,6 +156,11 @@ class Date(TemporalType): pass +@attrs.define(frozen=True) +class Time(TemporalType): + pass + + @attrs.define(frozen=True) class NumericType(ColType): # 'precision' signifies how many fractional digits (after the dot) we want to compare @@ -97,6 +187,8 @@ def python_type(self) -> type: "Return the equivalent Python type of the key" def make_value(self, value): + if isinstance(value, self.python_type): + return value return self.python_type(value) @@ -112,6 +204,7 @@ def python_type(self) -> type: @attrs.define(frozen=True) class StringType(ColType): python_type = str + collation: Optional[Collation] = attrs.field(default=None, kw_only=True) @attrs.define(frozen=True) @@ -131,7 +224,14 @@ class Native_UUID(ColType_UUID): @attrs.define(frozen=True) class String_UUID(ColType_UUID, StringType): - pass + # Case is important for UUIDs stored as regular string, not native UUIDs stored as numbers. + # We slice them internally as numbers, but render them back to SQL as lower/upper case. + # None means we do not know for sure, behave as with False, but it might be unreliable. + lowercase: Optional[bool] = None + uppercase: Optional[bool] = None + + def make_value(self, v: str) -> ArithUUID: + return self.python_type(v, lowercase=self.lowercase, uppercase=self.uppercase) @attrs.define(frozen=True) @@ -144,9 +244,6 @@ def test_value(value: str) -> bool: except ValueError: return False - def make_value(self, value): - return self.python_type(value) - @attrs.define(frozen=True) class String_VaryingAlphanum(String_Alphanum): @@ -158,6 +255,8 @@ class String_FixedAlphanum(String_Alphanum): length: int def make_value(self, value): + if isinstance(value, self.python_type): + return value if len(value) != self.length: raise ValueError(f"Expected alphanumeric value of length {self.length}, but got '{value}'.") return self.python_type(value, max_len=self.length) @@ -196,7 +295,7 @@ class Integer(NumericType, IKey): precision: int = 0 python_type: type = int - def __attrs_post_init__(self): + def __attrs_post_init__(self) -> None: assert self.precision == 0 diff --git a/data_diff/cloud/data_source.py b/data_diff/cloud/data_source.py index 32fd89b92..3f3b2e16f 100644 --- a/data_diff/cloud/data_source.py +++ b/data_diff/cloud/data_source.py @@ -46,7 +46,7 @@ def process_response(self, value: str) -> str: return value -def _validate_temp_schema(temp_schema: str): +def _validate_temp_schema(temp_schema: str) -> None: if len(temp_schema.split(".")) != 2: raise ValueError("Temporary schema should have a format .") diff --git a/data_diff/cloud/datafold_api.py b/data_diff/cloud/datafold_api.py index 3ec637a02..f6548abbf 100644 --- a/data_diff/cloud/datafold_api.py +++ b/data_diff/cloud/datafold_api.py @@ -144,18 +144,22 @@ class TSummaryResultSchemaStats(pydantic.BaseModel): exclusive_columns: Tuple[List[str], List[str]] +class TSummaryResultDependencyDetails(pydantic.BaseModel): + deps: Dict[str, List[Dict]] + + 
class TCloudApiDataDiffSummaryResult(pydantic.BaseModel): status: str pks: Optional[TSummaryResultPrimaryKeyStats] values: Optional[TSummaryResultValueStats] schema_: Optional[TSummaryResultSchemaStats] - dependencies: Optional[Dict[str, Any]] + deps: Optional[TSummaryResultDependencyDetails] @classmethod def from_orm(cls, obj: Any) -> Self: pks = TSummaryResultPrimaryKeyStats(**obj["pks"]) if "pks" in obj else None values = TSummaryResultValueStats(**obj["values"]) if "values" in obj else None - deps = obj["deps"] if "deps" in obj else None + deps = TSummaryResultDependencyDetails(**obj["dependencies"]) if "dependencies" in obj else None schema = TSummaryResultSchemaStats(**obj["schema"]) if "schema" in obj else None return cls( status=obj["status"], @@ -185,7 +189,7 @@ class DatafoldAPI: host: str = "https://app.datafold.com/" timeout: int = 30 - def __attrs_post_init__(self): + def __attrs_post_init__(self) -> None: self.host = self.host.rstrip("/") self.headers = { "Authorization": f"Key {self.api_key}", diff --git a/data_diff/config.py b/data_diff/config.py index 1b091f074..3c4cbef99 100644 --- a/data_diff/config.py +++ b/data_diff/config.py @@ -99,7 +99,7 @@ def _apply_config(config: Dict[str, Any], run_name: str, kw: Dict[str, Any]): _ENV_VAR_PATTERN = r"\$\{([A-Za-z0-9_]+)\}" -def _resolve_env(config: Dict[str, Any]): +def _resolve_env(config: Dict[str, Any]) -> None: """ Resolve environment variables referenced as ${ENV_VAR_NAME}. Missing environment variables are replaced with an empty string. diff --git a/data_diff/databases/_connect.py b/data_diff/databases/_connect.py index 1e34ef62b..df63c78bc 100644 --- a/data_diff/databases/_connect.py +++ b/data_diff/databases/_connect.py @@ -100,7 +100,7 @@ class Connect: database_by_scheme: Dict[str, Database] conn_cache: MutableMapping[Hashable, Database] - def __init__(self, database_by_scheme: Dict[str, Database] = DATABASE_BY_SCHEME): + def __init__(self, database_by_scheme: Dict[str, Database] = DATABASE_BY_SCHEME) -> None: super().__init__() self.database_by_scheme = database_by_scheme self.conn_cache = weakref.WeakValueDictionary() diff --git a/data_diff/databases/base.py b/data_diff/databases/base.py index 059854a5f..56e64076d 100644 --- a/data_diff/databases/base.py +++ b/data_diff/databases/base.py @@ -5,7 +5,22 @@ import math import sys import logging -from typing import Any, Callable, ClassVar, Dict, Generator, Tuple, Optional, Sequence, Type, List, Union, TypeVar +from typing import ( + Any, + Callable, + ClassVar, + Dict, + Generator, + Iterator, + NewType, + Tuple, + Optional, + Sequence, + Type, + List, + Union, + TypeVar, +) from functools import partial, wraps from concurrent.futures import ThreadPoolExecutor import threading @@ -19,7 +34,8 @@ from data_diff.abcs.compiler import AbstractCompiler, Compilable from data_diff.queries.extras import ApplyFuncAndNormalizeAsString, Checksum, NormalizeAsString -from data_diff.utils import ArithString, is_uuid, join_iter, safezip +from data_diff.schema import RawColumnInfo +from data_diff.utils import ArithString, ArithUUID, is_uuid, join_iter, safezip from data_diff.queries.api import Expr, table, Select, SKIP, Explain, Code, this from data_diff.queries.ast_classes import ( Alias, @@ -115,7 +131,7 @@ def dialect(self) -> "BaseDialect": def compile(self, elem, params=None) -> str: return self.dialect.compile(self, elem, params) - def new_unique_name(self, prefix="tmp"): + def new_unique_name(self, prefix="tmp") -> str: self._counter[0] += 1 return 
f"{prefix}{self._counter[0]}" @@ -172,7 +188,7 @@ class ThreadLocalInterpreter: compiler: Compiler gen: Generator - def apply_queries(self, callback: Callable[[str], Any]): + def apply_queries(self, callback: Callable[[str], Any]) -> None: q: Expr = next(self.gen) while True: sql = self.compiler.database.dialect.compile(self.compiler, q) @@ -201,6 +217,7 @@ class BaseDialect(abc.ABC): SUPPORTS_INDEXES: ClassVar[bool] = False PREVENT_OVERFLOW_WHEN_CONCAT: ClassVar[bool] = False TYPE_CLASSES: ClassVar[Dict[str, Type[ColType]]] = {} + DEFAULT_NUMERIC_PRECISION: ClassVar[int] = 0 # effective precision when type is just "NUMERIC" PLACEHOLDER_TABLE = None # Used for Oracle @@ -247,6 +264,9 @@ def _compile(self, compiler: Compiler, elem) -> str: return self.timestamp_value(elem) elif isinstance(elem, bytes): return f"b'{elem.decode()}'" + elif isinstance(elem, ArithUUID): + s = f"'{elem.uuid}'" + return s.upper() if elem.uppercase else s.lower() if elem.lowercase else s elif isinstance(elem, ArithString): return f"'{elem}'" assert False, elem @@ -680,8 +700,10 @@ def _constant_value(self, v): return f"'{v}'" elif isinstance(v, datetime): return self.timestamp_value(v) - elif isinstance(v, UUID): + elif isinstance(v, UUID): # probably unused anymore in favour of ArithUUID return f"'{v}'" + elif isinstance(v, ArithUUID): + return f"'{v.uuid}'" elif isinstance(v, decimal.Decimal): return str(v) elif isinstance(v, bytearray): @@ -707,27 +729,18 @@ def type_repr(self, t) -> str: datetime: "TIMESTAMP", }[t] - def _parse_type_repr(self, type_repr: str) -> Optional[Type[ColType]]: - return self.TYPE_CLASSES.get(type_repr) - - def parse_type( - self, - table_path: DbPath, - col_name: str, - type_repr: str, - datetime_precision: int = None, - numeric_precision: int = None, - numeric_scale: int = None, - ) -> ColType: + def parse_type(self, table_path: DbPath, info: RawColumnInfo) -> ColType: "Parse type info as returned by the database" - cls = self._parse_type_repr(type_repr) + cls = self.TYPE_CLASSES.get(info.data_type) if cls is None: - return UnknownColType(type_repr) + return UnknownColType(info.data_type) if issubclass(cls, TemporalType): return cls( - precision=datetime_precision if datetime_precision is not None else DEFAULT_DATETIME_PRECISION, + precision=info.datetime_precision + if info.datetime_precision is not None + else DEFAULT_DATETIME_PRECISION, rounds=self.ROUNDS_ON_PREC_LOSS, ) @@ -738,22 +751,22 @@ def parse_type( return cls() elif issubclass(cls, Decimal): - if numeric_scale is None: - numeric_scale = 0 # Needed for Oracle. - return cls(precision=numeric_scale) + if info.numeric_scale is None: + return cls(precision=0) # Needed for Oracle. 
+ return cls(precision=info.numeric_scale) elif issubclass(cls, Float): # assert numeric_scale is None return cls( precision=self._convert_db_precision_to_digits( - numeric_precision if numeric_precision is not None else DEFAULT_NUMERIC_PRECISION + info.numeric_precision if info.numeric_precision is not None else DEFAULT_NUMERIC_PRECISION ) ) elif issubclass(cls, (JSON, Array, Struct, Text, Native_UUID)): return cls() - raise TypeError(f"Parsing {type_repr} returned an unknown type '{cls}'.") + raise TypeError(f"Parsing {info.data_type} returned an unknown type {cls!r}.") def _convert_db_precision_to_digits(self, p: int) -> int: """Convert from binary precision, used by floats, to decimal precision.""" @@ -887,20 +900,21 @@ def optimizer_hints(self, hints: str) -> str: T = TypeVar("T", bound=BaseDialect) +Row = Sequence[Any] @attrs.define(frozen=True) class QueryResult: - rows: list + rows: List[Row] columns: Optional[list] = None - def __iter__(self): + def __iter__(self) -> Iterator[Row]: return iter(self.rows) - def __len__(self): + def __len__(self) -> int: return len(self.rows) - def __getitem__(self, i): + def __getitem__(self, i) -> Row: return self.rows[i] @@ -924,6 +938,12 @@ class Database(abc.ABC): is_closed: bool = False _dialect: BaseDialect = None + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.close() + @property def name(self): return type(self).__name__ @@ -1018,7 +1038,7 @@ def select_table_schema(self, path: DbPath) -> str: f"WHERE table_name = '{name}' AND table_schema = '{schema}'" ) - def query_table_schema(self, path: DbPath) -> Dict[str, tuple]: + def query_table_schema(self, path: DbPath) -> Dict[str, RawColumnInfo]: """Query the table for its schema for table in 'path', and return {column: tuple} where the tuple is (table_name, col_name, type_repr, datetime_precision?, numeric_precision?, numeric_scale?) @@ -1026,15 +1046,27 @@ def query_table_schema(self, path: DbPath) -> Dict[str, tuple]: accessing the schema using a SQL query. """ rows = self.query(self.select_table_schema(path), list, log_message=path) + if not rows: raise RuntimeError(f"{self.name}: Table '{'.'.join(path)}' does not exist, or has no columns") - d = {r[0]: r for r in rows} + d = { + r[0]: RawColumnInfo( + column_name=r[0], + data_type=r[1], + datetime_precision=r[2], + numeric_precision=r[3], + numeric_scale=r[4], + collation_name=r[5] if len(r) > 5 else None, + ) + for r in rows + } + assert len(d) == len(rows) return d def select_table_unique_columns(self, path: DbPath) -> str: - "Provide SQL for selecting the names of unique columns in the table" + """Provide SQL for selecting the names of unique columns in the table""" schema, name = self._normalize_table_path(path) return ( @@ -1051,7 +1083,11 @@ def query_table_unique_columns(self, path: DbPath) -> List[str]: return list(res) def _process_table_schema( - self, path: DbPath, raw_schema: Dict[str, tuple], filter_columns: Sequence[str] = None, where: str = None + self, + path: DbPath, + raw_schema: Dict[str, RawColumnInfo], + filter_columns: Sequence[str] = None, + where: str = None, ): """Process the result of query_table_schema(). 
@@ -1067,7 +1103,7 @@ def _process_table_schema( accept = {i.lower() for i in filter_columns} filtered_schema = {name: row for name, row in raw_schema.items() if name.lower() in accept} - col_dict = {row[0]: self.dialect.parse_type(path, *row) for _name, row in filtered_schema.items()} + col_dict = {info.column_name: self.dialect.parse_type(path, info) for info in filtered_schema.values()} self._refine_coltypes(path, col_dict, where) @@ -1076,7 +1112,7 @@ def _process_table_schema( def _refine_coltypes( self, table_path: DbPath, col_dict: Dict[str, ColType], where: Optional[str] = None, sample_size=64 - ): + ) -> Dict[str, ColType]: """Refine the types in the column dict, by querying the database for a sample of their values 'where' restricts the rows to be sampled. @@ -1084,7 +1120,7 @@ def _refine_coltypes( text_columns = [k for k, v in col_dict.items() if isinstance(v, Text)] if not text_columns: - return + return col_dict fields = [Code(self.dialect.normalize_uuid(self.dialect.quote(c), String_UUID())) for c in text_columns] @@ -1104,7 +1140,10 @@ def _refine_coltypes( ) else: assert col_name in col_dict - col_dict[col_name] = String_UUID() + col_dict[col_name] = String_UUID( + lowercase=all(s == s.lower() for s in uuid_samples), + uppercase=all(s == s.upper() for s in uuid_samples), + ) continue if self.SUPPORTS_ALPHANUMS: # Anything but MySQL (so far) @@ -1116,7 +1155,9 @@ def _refine_coltypes( ) else: assert col_name in col_dict - col_dict[col_name] = String_VaryingAlphanum() + col_dict[col_name] = String_VaryingAlphanum(collation=col_dict[col_name].collation) + + return col_dict def _normalize_table_path(self, path: DbPath) -> DbPath: if len(path) == 1: @@ -1147,9 +1188,8 @@ def _query_conn(self, conn, sql_code: Union[str, ThreadLocalInterpreter]) -> Que return apply_query(callback, sql_code) def close(self): - "Close connection(s) to the database instance. Querying will stop functioning." + """Close connection(s) to the database instance. Querying will stop functioning.""" self.is_closed = True - return super().close() @property def dialect(self) -> BaseDialect: @@ -1192,7 +1232,7 @@ class ThreadedDatabase(Database): _queue: Optional[ThreadPoolExecutor] = None thread_local: threading.local = attrs.field(factory=threading.local) - def __attrs_post_init__(self): + def __attrs_post_init__(self) -> None: self._queue = ThreadPoolExecutor(self.thread_count, initializer=self.set_conn) logger.info(f"[{self.name}] Starting a threadpool, size={self.thread_count}.") @@ -1208,18 +1248,20 @@ def _query(self, sql_code: Union[str, ThreadLocalInterpreter]) -> QueryResult: return r.result() def _query_in_worker(self, sql_code: Union[str, ThreadLocalInterpreter]): - "This method runs in a worker thread" + """This method runs in a worker thread""" if self._init_error: raise self._init_error return self._query_conn(self.thread_local.conn, sql_code) @abstractmethod def create_connection(self): - "Return a connection instance, that supports the .cursor() method." 
+ """Return a connection instance, that supports the .cursor() method.""" def close(self): super().close() self._queue.shutdown() + if hasattr(self.thread_local, "conn"): + self.thread_local.conn.close() @property def is_autocommit(self) -> bool: diff --git a/data_diff/databases/bigquery.py b/data_diff/databases/bigquery.py index 26d8aec3d..2a8797e9d 100644 --- a/data_diff/databases/bigquery.py +++ b/data_diff/databases/bigquery.py @@ -19,6 +19,8 @@ TemporalType, Boolean, UnknownColType, + Time, + Date, ) from data_diff.databases.base import ( BaseDialect, @@ -33,6 +35,7 @@ MD5_HEXDIGITS, ) from data_diff.databases.base import TIMESTAMP_PRECISION_POS, ThreadLocalInterpreter +from data_diff.schema import RawColumnInfo @import_helper(text="Please install BigQuery and configure your google-cloud access.") @@ -62,6 +65,8 @@ class Dialect(BaseDialect): # Dates "TIMESTAMP": Timestamp, "DATETIME": Datetime, + "DATE": Date, + "TIME": Time, # Numbers "INT64": Integer, "INT32": Integer, @@ -75,14 +80,19 @@ class Dialect(BaseDialect): } TYPE_ARRAY_RE = re.compile(r"ARRAY<(.+)>") TYPE_STRUCT_RE = re.compile(r"STRUCT<(.+)>") + # [BIG]NUMERIC, [BIG]NUMERIC(precision, scale), [BIG]NUMERIC(precision) + TYPE_NUMERIC_RE = re.compile(r"^((BIG)?NUMERIC)(?:\((\d+)(?:, (\d+))?\))?$") + # https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#parameterized_decimal_type + # The default scale is 9, which means a number can have up to 9 digits after the decimal point. + DEFAULT_NUMERIC_PRECISION = 9 def random(self) -> str: return "RAND()" - def quote(self, s: str): + def quote(self, s: str) -> str: return f"`{s}`" - def to_string(self, s: str): + def to_string(self, s: str) -> str: return f"cast({s} as string)" def type_repr(self, t) -> str: @@ -91,29 +101,45 @@ def type_repr(self, t) -> str: except KeyError: return super().type_repr(t) - def parse_type( - self, - table_path: DbPath, - col_name: str, - type_repr: str, - *args: Any, # pass-through args - **kwargs: Any, # pass-through args - ) -> ColType: - col_type = super().parse_type(table_path, col_name, type_repr, *args, **kwargs) - if isinstance(col_type, UnknownColType): - m = self.TYPE_ARRAY_RE.fullmatch(type_repr) - if m: - item_type = self.parse_type(table_path, col_name, m.group(1), *args, **kwargs) - col_type = Array(item_type=item_type) - - # We currently ignore structs' structure, but later can parse it too. Examples: - # - STRUCT (unnamed) - # - STRUCT (named) - # - STRUCT> (with complex fields) - # - STRUCT> (nested) - m = self.TYPE_STRUCT_RE.fullmatch(type_repr) - if m: - col_type = Struct() + def parse_type(self, table_path: DbPath, info: RawColumnInfo) -> ColType: + col_type = super().parse_type(table_path, info) + if not isinstance(col_type, UnknownColType): + return col_type + + m = self.TYPE_ARRAY_RE.fullmatch(info.data_type) + if m: + item_info = attrs.evolve(info, data_type=m.group(1)) + item_type = self.parse_type(table_path, item_info) + col_type = Array(item_type=item_type) + return col_type + + # We currently ignore structs' structure, but later can parse it too. 
Examples: + # - STRUCT (unnamed) + # - STRUCT (named) + # - STRUCT> (with complex fields) + # - STRUCT> (nested) + m = self.TYPE_STRUCT_RE.fullmatch(info.data_type) + if m: + col_type = Struct() + return col_type + + m = self.TYPE_NUMERIC_RE.fullmatch(info.data_type) + if m: + precision = int(m.group(3)) if m.group(3) else None + scale = int(m.group(4)) if m.group(4) else None + + if scale is not None: + # NUMERIC(..., scale) β€” scale is set explicitly + effective_precision = scale + elif precision is not None: + # NUMERIC(...) β€”Β scale is missing but precision is set + # effectively the same as NUMERIC(..., 0) + effective_precision = 0 + else: + # NUMERIC β†’ default scale is 9 + effective_precision = 9 + col_type = Decimal(precision=effective_precision) + return col_type return col_type @@ -138,6 +164,21 @@ def md5_as_hex(self, s: str) -> str: return f"md5({s})" def normalize_timestamp(self, value: str, coltype: TemporalType) -> str: + try: + is_date = coltype.is_date + is_time = coltype.is_time + except: + is_date = False + is_time = False + if isinstance(coltype, Date) or is_date: + return f"FORMAT_DATE('%F', {value})" + if isinstance(coltype, Time) or is_time: + microseconds = f"TIME_DIFF( {value}, cast('00:00:00' as time), microsecond)" + rounded = f"ROUND({microseconds}, -6 + {coltype.precision})" + time_value = f"TIME_ADD(cast('00:00:00' as time), interval cast({rounded} as int64) microsecond)" + converted = f"FORMAT_TIME('%H:%M:%E6S', {time_value})" + return converted + if coltype.rounds: timestamp = f"timestamp_micros(cast(round(unix_micros(cast({value} as timestamp))/1000000, {coltype.precision})*1000000 as int))" return f"FORMAT_TIMESTAMP('%F %H:%M:%E6S', {timestamp})" @@ -190,7 +231,7 @@ class BigQuery(Database): dataset: str _client: Any - def __init__(self, project, *, dataset, bigquery_credentials=None, **kw): + def __init__(self, project, *, dataset, bigquery_credentials=None, **kw) -> None: super().__init__() credentials = bigquery_credentials bigquery = import_bigquery() diff --git a/data_diff/databases/clickhouse.py b/data_diff/databases/clickhouse.py index 13082504b..43b69b3fa 100644 --- a/data_diff/databases/clickhouse.py +++ b/data_diff/databases/clickhouse.py @@ -14,6 +14,7 @@ ) from data_diff.abcs.database_types import ( ColType, + DbPath, Decimal, Float, Integer, @@ -24,6 +25,7 @@ Timestamp, Boolean, ) +from data_diff.schema import RawColumnInfo # https://clickhouse.com/docs/en/operations/server-configuration-parameters/settings/#default-database DEFAULT_DATABASE = "default" @@ -75,19 +77,19 @@ def _convert_db_precision_to_digits(self, p: int) -> int: # because it does not help for float with a big integer part. 
return super()._convert_db_precision_to_digits(p) - 2 - def _parse_type_repr(self, type_repr: str) -> Optional[Type[ColType]]: + def parse_type(self, table_path: DbPath, info: RawColumnInfo) -> ColType: nullable_prefix = "Nullable(" - if type_repr.startswith(nullable_prefix): - type_repr = type_repr[len(nullable_prefix) :].rstrip(")") + if info.data_type.startswith(nullable_prefix): + info = attrs.evolve(info, data_type=info.data_type[len(nullable_prefix) :].rstrip(")")) - if type_repr.startswith("Decimal"): - type_repr = "Decimal" - elif type_repr.startswith("FixedString"): - type_repr = "FixedString" - elif type_repr.startswith("DateTime64"): - type_repr = "DateTime64" + if info.data_type.startswith("Decimal"): + info = attrs.evolve(info, data_type="Decimal") + elif info.data_type.startswith("FixedString"): + info = attrs.evolve(info, data_type="FixedString") + elif info.data_type.startswith("DateTime64"): + info = attrs.evolve(info, data_type="DateTime64") - return self.TYPE_CLASSES.get(type_repr) + return super().parse_type(table_path, info) # def timestamp_value(self, t: DbTime) -> str: # # return f"'{t}'" @@ -173,7 +175,7 @@ class Clickhouse(ThreadedDatabase): _args: Dict[str, Any] - def __init__(self, *, thread_count: int, **kw): + def __init__(self, *, thread_count: int, **kw) -> None: super().__init__(thread_count=thread_count) self._args = kw diff --git a/data_diff/databases/databricks.py b/data_diff/databases/databricks.py index c755cfa9b..580de4894 100644 --- a/data_diff/databases/databricks.py +++ b/data_diff/databases/databricks.py @@ -5,6 +5,7 @@ import attrs from data_diff.abcs.database_types import ( + Date, Integer, Float, Decimal, @@ -26,6 +27,7 @@ import_helper, parse_table_name, ) +from data_diff.schema import RawColumnInfo @import_helper(text="You can install it using 'pip install databricks-sql-connector'") @@ -51,6 +53,7 @@ class Dialect(BaseDialect): # Timestamps "TIMESTAMP": Timestamp, "TIMESTAMP_NTZ": Timestamp, + "DATE": Date, # Text "STRING": Text, "VARCHAR": Text, @@ -64,7 +67,7 @@ def type_repr(self, t) -> str: except KeyError: return super().type_repr(t) - def quote(self, s: str): + def quote(self, s: str) -> str: return f"`{s}`" def to_string(self, s: str) -> str: @@ -89,7 +92,12 @@ def md5_as_hex(self, s: str) -> str: def normalize_timestamp(self, value: str, coltype: TemporalType) -> str: """Databricks timestamp contains no more than 6 digits in precision""" - + try: + is_date = coltype.is_date + except: + is_date = False + if isinstance(coltype, Date) or is_date: + return f"date_format({value}, 'yyyy-MM-dd')" if coltype.rounds: # cast to timestamp due to unix_micros() requiring timestamp timestamp = f"cast(round(unix_micros(cast({value} as timestamp)) / 1000000, {coltype.precision}) * 1000000 as bigint)" @@ -117,7 +125,7 @@ class Databricks(ThreadedDatabase): catalog: str _args: Dict[str, Any] - def __init__(self, *, thread_count, **kw): + def __init__(self, *, thread_count, **kw) -> None: super().__init__(thread_count=thread_count) logging.getLogger("databricks.sql").setLevel(logging.WARNING) @@ -138,7 +146,7 @@ def create_connection(self): except databricks.sql.exc.Error as e: raise ConnectionError(*e.args) from e - def query_table_schema(self, path: DbPath) -> Dict[str, tuple]: + def query_table_schema(self, path: DbPath) -> Dict[str, RawColumnInfo]: # Databricks has INFORMATION_SCHEMA only for Databricks Runtime, not for Databricks SQL. 
# https://docs.databricks.com/spark/latest/spark-sql/language-manual/information-schema/columns.html # So, to obtain information about schema, we should use another approach. @@ -155,7 +163,12 @@ def query_table_schema(self, path: DbPath) -> Dict[str, tuple]: if not rows: raise RuntimeError(f"{self.name}: Table '{'.'.join(path)}' does not exist, or has no columns") - d = {r.COLUMN_NAME: (r.COLUMN_NAME, r.TYPE_NAME, r.DECIMAL_DIGITS, None, None) for r in rows} + d = { + r.COLUMN_NAME: RawColumnInfo( + column_name=r.COLUMN_NAME, data_type=r.TYPE_NAME, numeric_precision=r.DECIMAL_DIGITS + ) + for r in rows + } assert len(d) == len(rows) return d @@ -173,37 +186,47 @@ def query_table_schema(self, path: DbPath) -> Dict[str, tuple]: # ) def _process_table_schema( - self, path: DbPath, raw_schema: Dict[str, tuple], filter_columns: Sequence[str], where: str = None + self, path: DbPath, raw_schema: Dict[str, RawColumnInfo], filter_columns: Sequence[str], where: str = None ): accept = {i.lower() for i in filter_columns} - rows = [row for name, row in raw_schema.items() if name.lower() in accept] + col_infos = [row for name, row in raw_schema.items() if name.lower() in accept] resulted_rows = [] - for row in rows: - row_type = "DECIMAL" if row[1].startswith("DECIMAL") else row[1] + for info in col_infos: + raw_data_type = info.data_type + row_type = info.data_type.split("(")[0] + info = attrs.evolve(info, data_type=row_type) type_cls = self.dialect.TYPE_CLASSES.get(row_type, UnknownColType) if issubclass(type_cls, Integer): - row = (row[0], row_type, None, None, 0) + info = attrs.evolve(info, numeric_scale=0) elif issubclass(type_cls, Float): - numeric_precision = math.ceil(row[2] / math.log(2, 10)) - row = (row[0], row_type, None, numeric_precision, None) + numeric_precision = math.ceil(info.numeric_precision / math.log(2, 10)) + info = attrs.evolve(info, numeric_precision=numeric_precision) elif issubclass(type_cls, Decimal): - items = row[1][8:].rstrip(")").split(",") + items = raw_data_type[8:].rstrip(")").split(",") numeric_precision, numeric_scale = int(items[0]), int(items[1]) - row = (row[0], row_type, None, numeric_precision, numeric_scale) + info = attrs.evolve( + info, + numeric_precision=numeric_precision, + numeric_scale=numeric_scale, + ) elif issubclass(type_cls, Timestamp): - row = (row[0], row_type, row[2], None, None) + info = attrs.evolve( + info, + datetime_precision=info.numeric_precision, + numeric_precision=None, + ) else: - row = (row[0], row_type, None, None, None) + info = attrs.evolve(info, numeric_precision=None) - resulted_rows.append(row) + resulted_rows.append(info) - col_dict: Dict[str, ColType] = {row[0]: self.dialect.parse_type(path, *row) for row in resulted_rows} + col_dict: Dict[str, ColType] = {info.column_name: self.dialect.parse_type(path, info) for info in resulted_rows} self._refine_coltypes(path, col_dict, where) return col_dict diff --git a/data_diff/databases/duckdb.py b/data_diff/databases/duckdb.py index d057e0d90..9537ce50e 100644 --- a/data_diff/databases/duckdb.py +++ b/data_diff/databases/duckdb.py @@ -3,6 +3,7 @@ import attrs from packaging.version import parse as parse_version +from data_diff.schema import RawColumnInfo from data_diff.utils import match_regexps from data_diff.abcs.database_types import ( Timestamp, @@ -45,6 +46,10 @@ class Dialect(BaseDialect): SUPPORTS_PRIMARY_KEY = True SUPPORTS_INDEXES = True + # https://duckdb.org/docs/sql/data_types/numeric#fixed-point-decimals + # The default WIDTH and SCALE is DECIMAL(18, 3), if none are 
specified. + DEFAULT_NUMERIC_PRECISION = 3 + TYPE_CLASSES = { # Timestamps "TIMESTAMP WITH TIME ZONE": TimestampTZ, @@ -74,24 +79,16 @@ def _convert_db_precision_to_digits(self, p: int) -> int: # Subtracting 2 due to weird precision issues in PostgreSQL return super()._convert_db_precision_to_digits(p) - 2 - def parse_type( - self, - table_path: DbPath, - col_name: str, - type_repr: str, - datetime_precision: int = None, - numeric_precision: int = None, - numeric_scale: int = None, - ) -> ColType: + def parse_type(self, table_path: DbPath, info: RawColumnInfo) -> ColType: regexps = { r"DECIMAL\((\d+),(\d+)\)": Decimal, } - for m, t_cls in match_regexps(regexps, type_repr): + for m, t_cls in match_regexps(regexps, info.data_type): precision = int(m.group(2)) return t_cls(precision=precision) - return super().parse_type(table_path, col_name, type_repr, datetime_precision, numeric_precision, numeric_scale) + return super().parse_type(table_path, info) def set_timezone_to_utc(self) -> str: return "SET GLOBAL TimeZone='UTC'" @@ -129,7 +126,7 @@ class DuckDB(Database): _args: Dict[str, Any] = attrs.field(init=False) _conn: Any = attrs.field(init=False) - def __init__(self, **kw): + def __init__(self, **kw) -> None: super().__init__() self._args = kw self._conn = self.create_connection() @@ -170,12 +167,16 @@ def select_table_schema(self, path: DbPath) -> str: database, schema, table = self._normalize_table_path(path) info_schema_path = ["information_schema", "columns"] + if database: info_schema_path.insert(0, database) + dynamic_database_clause = f"'{database}'" + else: + dynamic_database_clause = "current_catalog()" return ( f"SELECT column_name, data_type, datetime_precision, numeric_precision, numeric_scale FROM {'.'.join(info_schema_path)} " - f"WHERE table_name = '{table}' AND table_schema = '{schema}'" + f"WHERE table_name = '{table}' AND table_schema = '{schema}' and table_catalog = {dynamic_database_clause}" ) def _normalize_table_path(self, path: DbPath) -> DbPath: diff --git a/data_diff/databases/mssql.py b/data_diff/databases/mssql.py index 834ed9cd3..59d8a49d6 100644 --- a/data_diff/databases/mssql.py +++ b/data_diff/databases/mssql.py @@ -24,6 +24,8 @@ Native_UUID, Text, Boolean, + Date, + Time, ) @@ -46,7 +48,8 @@ class Dialect(BaseDialect): "datetime": Timestamp, "datetime2": Timestamp, "smalldatetime": Timestamp, - "date": Timestamp, + "date": Date, + "time": Time, # Numbers "float": Float, "real": Float, @@ -76,7 +79,7 @@ class Dialect(BaseDialect): "json": JSON, } - def quote(self, s: str): + def quote(self, s: str) -> str: return f"[{s}]" def set_timezone_to_utc(self) -> str: @@ -93,8 +96,9 @@ def current_schema(self) -> str: FROM sys.database_principals WHERE name = CURRENT_USER""" - def to_string(self, s: str): - return f"CONVERT(varchar, {s})" + def to_string(self, s: str) -> str: + # Both convert(varchar(max), …) and convert(text, …) work.
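# A small standalone illustration of the DuckDB schema-query construction above: when no database
# is passed, the table_catalog filter falls back to current_catalog() rather than a quoted literal.
# The table and schema names used here are made up for the example.
from typing import Optional

def build_duckdb_schema_query(table: str, schema: str, database: Optional[str] = None) -> str:
    info_schema_path = ["information_schema", "columns"]
    if database:
        info_schema_path.insert(0, database)
        dynamic_database_clause = f"'{database}'"
    else:
        dynamic_database_clause = "current_catalog()"
    return (
        "SELECT column_name, data_type, datetime_precision, numeric_precision, numeric_scale "
        f"FROM {'.'.join(info_schema_path)} "
        f"WHERE table_name = '{table}' AND table_schema = '{schema}' "
        f"AND table_catalog = {dynamic_database_clause}"
    )

print(build_duckdb_schema_query("orders", "main"))
print(build_duckdb_schema_query("orders", "main", database="analytics"))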
+ return f"CONVERT(VARCHAR(MAX), {s})" def type_repr(self, t) -> str: try: @@ -167,7 +171,7 @@ class MsSQL(ThreadedDatabase): _args: Dict[str, Any] _mssql: Any - def __init__(self, host, port, user, password, *, database, thread_count, **kw): + def __init__(self, host, port, user, password, *, database, thread_count, **kw) -> None: super().__init__(thread_count=thread_count) args = dict(server=host, port=port, database=database, user=user, password=password, **kw) @@ -201,7 +205,7 @@ def select_table_schema(self, path: DbPath) -> str: info_schema_path.insert(0, self.dialect.quote(database)) return ( - "SELECT column_name, data_type, datetime_precision, numeric_precision, numeric_scale " + "SELECT column_name, data_type, datetime_precision, numeric_precision, numeric_scale, collation_name " f"FROM {'.'.join(info_schema_path)} " f"WHERE table_name = '{name}' AND table_schema = '{schema}'" ) diff --git a/data_diff/databases/mysql.py b/data_diff/databases/mysql.py index 647388f26..1ee04460b 100644 --- a/data_diff/databases/mysql.py +++ b/data_diff/databases/mysql.py @@ -1,4 +1,4 @@ -from typing import Any, ClassVar, Dict, Type +from typing import Any, ClassVar, Dict, Type, Union import attrs @@ -20,6 +20,7 @@ import_helper, ConnectError, BaseDialect, + ThreadLocalInterpreter, ) from data_diff.databases.base import ( MD5_HEXDIGITS, @@ -69,10 +70,10 @@ class Dialect(BaseDialect): "boolean": Boolean, } - def quote(self, s: str): + def quote(self, s: str) -> str: return f"`{s}`" - def to_string(self, s: str): + def to_string(self, s: str) -> str: return f"cast({s} as char)" def is_distinct_from(self, a: str, b: str) -> str: @@ -128,7 +129,7 @@ class MySQL(ThreadedDatabase): _args: Dict[str, Any] - def __init__(self, *, thread_count, **kw): + def __init__(self, *, thread_count, **kw) -> None: super().__init__(thread_count=thread_count) self._args = kw @@ -148,3 +149,11 @@ def create_connection(self): elif e.errno == mysql.errorcode.ER_BAD_DB_ERROR: raise ConnectError("Database does not exist") from e raise ConnectError(*e.args) from e + + def _query_in_worker(self, sql_code: Union[str, ThreadLocalInterpreter]): + "This method runs in a worker thread" + if self._init_error: + raise self._init_error + if not self.thread_local.conn.is_connected(): + self.thread_local.conn.ping(reconnect=True, attempts=3, delay=5) + return self._query_conn(self.thread_local.conn, sql_code) diff --git a/data_diff/databases/oracle.py b/data_diff/databases/oracle.py index ab84f0b6d..f5960476d 100644 --- a/data_diff/databases/oracle.py +++ b/data_diff/databases/oracle.py @@ -2,6 +2,7 @@ import attrs +from data_diff.schema import RawColumnInfo from data_diff.utils import match_regexps from data_diff.abcs.database_types import ( Decimal, @@ -58,10 +59,10 @@ class Dialect( ROUNDS_ON_PREC_LOSS = True PLACEHOLDER_TABLE = "DUAL" - def quote(self, s: str): + def quote(self, s: str) -> str: return f'"{s}"' - def to_string(self, s: str): + def to_string(self, s: str) -> str: return f"cast({s} as varchar(1024))" def limit_select( @@ -105,26 +106,18 @@ def constant_values(self, rows) -> str: def explain_as_text(self, query: str) -> str: raise NotImplementedError("Explain not yet implemented in Oracle") - def parse_type( - self, - table_path: DbPath, - col_name: str, - type_repr: str, - datetime_precision: int = None, - numeric_precision: int = None, - numeric_scale: int = None, - ) -> ColType: + def parse_type(self, table_path: DbPath, info: RawColumnInfo) -> ColType: regexps = { r"TIMESTAMP\((\d)\) WITH LOCAL TIME ZONE": Timestamp, 
r"TIMESTAMP\((\d)\) WITH TIME ZONE": TimestampTZ, r"TIMESTAMP\((\d)\)": Timestamp, } - for m, t_cls in match_regexps(regexps, type_repr): + for m, t_cls in match_regexps(regexps, info.data_type): precision = int(m.group(1)) return t_cls(precision=precision, rounds=self.ROUNDS_ON_PREC_LOSS) - return super().parse_type(table_path, col_name, type_repr, datetime_precision, numeric_precision, numeric_scale) + return super().parse_type(table_path, info) def set_timezone_to_utc(self) -> str: return "ALTER SESSION SET TIME_ZONE = 'UTC'" @@ -171,7 +164,7 @@ class Oracle(ThreadedDatabase): kwargs: Dict[str, Any] _oracle: Any - def __init__(self, *, host, database, thread_count, **kw): + def __init__(self, *, host, database, thread_count, **kw) -> None: super().__init__(thread_count=thread_count) self.kwargs = dict(dsn=f"{host}/{database}" if database else host, **kw) self.default_schema = kw.get("user").upper() diff --git a/data_diff/databases/postgresql.py b/data_diff/databases/postgresql.py index d29fa0eb5..d93a46d70 100644 --- a/data_diff/databases/postgresql.py +++ b/data_diff/databases/postgresql.py @@ -1,5 +1,5 @@ from typing import Any, ClassVar, Dict, List, Type - +from urllib.parse import unquote import attrs from data_diff.abcs.database_types import ( @@ -17,6 +17,7 @@ FractionalType, Boolean, Date, + Time, ) from data_diff.databases.base import BaseDialect, ThreadedDatabase, import_helper, ConnectError from data_diff.databases.base import ( @@ -45,12 +46,20 @@ class PostgresqlDialect(BaseDialect): SUPPORTS_PRIMARY_KEY: ClassVar[bool] = True SUPPORTS_INDEXES = True + # https://www.postgresql.org/docs/current/datatype-numeric.html#DATATYPE-NUMERIC-DECIMAL + # without any precision or scale creates an β€œunconstrained numeric” column + # in which numeric values of any length can be stored, up to the implementation limits. 
+ # https://www.postgresql.org/docs/current/datatype-numeric.html#DATATYPE-NUMERIC-TABLE + DEFAULT_NUMERIC_PRECISION = 16383 + TYPE_CLASSES: ClassVar[Dict[str, Type[ColType]]] = { # Timestamps "timestamp with time zone": TimestampTZ, "timestamp without time zone": Timestamp, "timestamp": Timestamp, "date": Date, + "time with time zone": Time, + "time without time zone": Time, # Numbers "double precision": Float, "real": Float, @@ -105,6 +114,23 @@ def normalize_timestamp(self, value: str, coltype: TemporalType) -> str: def _add_padding(coltype: TemporalType, timestamp6: str): return f"RPAD(LEFT({timestamp6}, {TIMESTAMP_PRECISION_POS+coltype.precision}), {TIMESTAMP_PRECISION_POS+6}, '0')" + try: + is_date = coltype.is_date + is_time = coltype.is_time + except: + is_date = False + is_time = False + + if isinstance(coltype, Date) or is_date: + return f"cast({value} as varchar)" + + if isinstance(coltype, Time) or is_time: + seconds = f"EXTRACT( epoch from {value})" + rounded = f"ROUND({seconds}, {coltype.precision})" + time_value = f"CAST('00:00:00' as time) + make_interval(0, 0, 0, 0, 0, 0, {rounded})" # 6th arg = seconds + converted = f"to_char({time_value}, 'hh24:mi:ss.ff6')" + return converted + if coltype.rounds: # NULL value expected to return NULL after normalization null_case_begin = f"CASE WHEN {value} IS NULL THEN NULL ELSE " @@ -157,7 +183,7 @@ class PostgreSQL(ThreadedDatabase): _args: Dict[str, Any] _conn: Any - def __init__(self, *, thread_count, **kw): + def __init__(self, *, thread_count, **kw) -> None: super().__init__(thread_count=thread_count) self._args = kw self.default_schema = "public" @@ -168,6 +194,7 @@ def create_connection(self): pg = import_postgresql() try: + self._args["password"] = unquote(self._args["password"]) self._conn = pg.connect( **self._args, keepalives=1, keepalives_idle=5, keepalives_interval=2, keepalives_count=2 ) @@ -184,10 +211,21 @@ def select_table_schema(self, path: DbPath) -> str: if database: info_schema_path.insert(0, database) - return ( - f"SELECT column_name, data_type, datetime_precision, numeric_precision, numeric_scale FROM {'.'.join(info_schema_path)} " - f"WHERE table_name = '{table}' AND table_schema = '{schema}'" - ) + return f"""SELECT column_name, data_type, datetime_precision, + -- see comment for DEFAULT_NUMERIC_PRECISION + CASE + WHEN data_type = 'numeric' + THEN coalesce(numeric_precision, 131072 + {self.dialect.DEFAULT_NUMERIC_PRECISION}) + ELSE numeric_precision + END AS numeric_precision, + CASE + WHEN data_type = 'numeric' + THEN coalesce(numeric_scale, {self.dialect.DEFAULT_NUMERIC_PRECISION}) + ELSE numeric_scale + END AS numeric_scale + FROM {'.'.join(info_schema_path)} + WHERE table_name = '{table}' AND table_schema = '{schema}' + """ def select_table_unique_columns(self, path: DbPath) -> str: database, schema, table = self._normalize_table_path(path) @@ -213,3 +251,8 @@ def _normalize_table_path(self, path: DbPath) -> DbPath: raise ValueError( f"{self.name}: Bad table path for {self}: '{'.'.join(path)}'. 
Expected format: table, schema.table, or database.schema.table" ) + + def close(self): + super().close() + if self._conn is not None: + self._conn.close() diff --git a/data_diff/databases/presto.py b/data_diff/databases/presto.py index ba1c73606..42e28056f 100644 --- a/data_diff/databases/presto.py +++ b/data_diff/databases/presto.py @@ -4,6 +4,7 @@ import attrs +from data_diff.schema import RawColumnInfo from data_diff.utils import match_regexps from data_diff.abcs.database_types import ( @@ -91,33 +92,25 @@ def quote(self, s: str): def to_string(self, s: str): return f"cast({s} as varchar)" - def parse_type( - self, - table_path: DbPath, - col_name: str, - type_repr: str, - datetime_precision: int = None, - numeric_precision: int = None, - _numeric_scale: int = None, - ) -> ColType: + def parse_type(self, table_path: DbPath, info: RawColumnInfo) -> ColType: timestamp_regexps = { r"timestamp\((\d)\)": Timestamp, r"timestamp\((\d)\) with time zone": TimestampTZ, } - for m, t_cls in match_regexps(timestamp_regexps, type_repr): + for m, t_cls in match_regexps(timestamp_regexps, info.data_type): precision = int(m.group(1)) return t_cls(precision=precision, rounds=self.ROUNDS_ON_PREC_LOSS) number_regexps = {r"decimal\((\d+),(\d+)\)": Decimal} - for m, n_cls in match_regexps(number_regexps, type_repr): + for m, n_cls in match_regexps(number_regexps, info.data_type): _prec, scale = map(int, m.groups()) return n_cls(scale) string_regexps = {r"varchar\((\d+)\)": Text, r"char\((\d+)\)": Text} - for m, n_cls in match_regexps(string_regexps, type_repr): + for m, n_cls in match_regexps(string_regexps, info.data_type): return n_cls() - return super().parse_type(table_path, col_name, type_repr, datetime_precision, numeric_precision) + return super().parse_type(table_path, info) def set_timezone_to_utc(self) -> str: return "SET TIME ZONE '+00:00'" @@ -159,7 +152,7 @@ class Presto(Database): _conn: Any - def __init__(self, **kw): + def __init__(self, **kw) -> None: super().__init__() self.default_schema = "public" prestodb = import_presto() diff --git a/data_diff/databases/redshift.py b/data_diff/databases/redshift.py index 44e86b17a..356d31344 100644 --- a/data_diff/databases/redshift.py +++ b/data_diff/databases/redshift.py @@ -1,4 +1,4 @@ -from typing import ClassVar, List, Dict, Type +from typing import Any, ClassVar, Iterable, List, Dict, Tuple, Type import attrs @@ -10,6 +10,7 @@ FractionalType, DbPath, TimestampTZ, + Integer, ) from data_diff.databases.postgresql import ( BaseDialect, @@ -20,6 +21,7 @@ TIMESTAMP_PRECISION_POS, PostgresqlDialect, ) +from data_diff.schema import RawColumnInfo @attrs.define(frozen=False) @@ -30,6 +32,8 @@ class Dialect(PostgresqlDialect): "double": Float, "real": Float, "super": JSON, + "int": Integer, # Redshift Spectrum + "float": Float, # Redshift Spectrum } SUPPORTS_INDEXES = False @@ -96,13 +100,12 @@ def select_external_table_schema(self, path: DbPath) -> str: + db_clause ) - def query_external_table_schema(self, path: DbPath) -> Dict[str, tuple]: + def query_external_table_schema(self, path: DbPath) -> Dict[str, RawColumnInfo]: rows = self.query(self.select_external_table_schema(path), list) if not rows: raise RuntimeError(f"{self.name}: Table '{'.'.join(path)}' does not exist, or has no columns") schema_dict = self._normalize_schema_info(rows) - return schema_dict def select_view_columns(self, path: DbPath) -> str: @@ -112,14 +115,12 @@ def select_view_columns(self, path: DbPath) -> str: cols(col_name name, col_type varchar) """.format(schema, table) - def 
query_pg_get_cols(self, path: DbPath) -> Dict[str, tuple]: + def query_pg_get_cols(self, path: DbPath) -> Dict[str, RawColumnInfo]: rows = self.query(self.select_view_columns(path), list) - if not rows: raise RuntimeError(f"{self.name}: View '{'.'.join(path)}' does not exist, or has no columns") schema_dict = self._normalize_schema_info(rows) - return schema_dict def select_svv_columns_schema(self, path: DbPath) -> Dict[str, tuple]: @@ -145,19 +146,29 @@ def select_svv_columns_schema(self, path: DbPath) -> Dict[str, tuple]: + db_clause ) - def query_svv_columns(self, path: DbPath) -> Dict[str, tuple]: + def query_svv_columns(self, path: DbPath) -> Dict[str, RawColumnInfo]: rows = self.query(self.select_svv_columns_schema(path), list) if not rows: raise RuntimeError(f"{self.name}: Table '{'.'.join(path)}' does not exist, or has no columns") - d = {r[0]: r for r in rows} + d = { + r[0]: RawColumnInfo( + column_name=r[0], + data_type=r[1], + datetime_precision=r[2], + numeric_precision=r[3], + numeric_scale=r[4], + collation_name=r[5] if len(r) > 5 else None, + ) + for r in rows + } assert len(d) == len(rows) return d # when using a non-information_schema source, strip (N) from type(N) etc. to match # typical information_schema output - def _normalize_schema_info(self, rows) -> Dict[str, tuple]: - schema_dict = {} + def _normalize_schema_info(self, rows: Iterable[Tuple[Any]]) -> Dict[str, RawColumnInfo]: + schema_dict: Dict[str, RawColumnInfo] = {} for r in rows: col_name = r[0] type_info = r[1].split("(") @@ -171,11 +182,17 @@ def _normalize_schema_info(self, rows) -> Dict[str, tuple]: precision = int(precision) scale = int(scale) - out = [col_name, base_type, None, precision, scale] - schema_dict[col_name] = tuple(out) + schema_dict[col_name] = RawColumnInfo( + column_name=col_name, + data_type=base_type, + datetime_precision=None, + numeric_precision=precision, + numeric_scale=scale, + collation_name=None, + ) return schema_dict - def query_table_schema(self, path: DbPath) -> Dict[str, tuple]: + def query_table_schema(self, path: DbPath) -> Dict[str, RawColumnInfo]: try: return super().query_table_schema(path) except RuntimeError: diff --git a/data_diff/databases/snowflake.py b/data_diff/databases/snowflake.py index 4152a407c..2081e3429 100644 --- a/data_diff/databases/snowflake.py +++ b/data_diff/databases/snowflake.py @@ -15,6 +15,7 @@ DbPath, Boolean, Date, + Time, ) from data_diff.databases.base import ( BaseDialect, @@ -45,6 +46,7 @@ class Dialect(BaseDialect): "TIMESTAMP_LTZ": Timestamp, "TIMESTAMP_TZ": TimestampTZ, "DATE": Date, + "TIME": Time, # Numbers "NUMBER": Decimal, "FLOAT": Float, @@ -81,6 +83,21 @@ def md5_as_hex(self, s: str) -> str: return f"md5({s})" def normalize_timestamp(self, value: str, coltype: TemporalType) -> str: + try: + is_date = coltype.is_date + is_time = coltype.is_time + except: + is_date = False + is_time = False + if isinstance(coltype, Date) or is_date: + return f"({value}::varchar)" + elif isinstance(coltype, Time) or is_time: + microseconds = f"TIMEDIFF(microsecond, cast('00:00:00' as time), {value})" + rounded = f"round({microseconds}, -6 + {coltype.precision})" + time_value = f"TIMEADD(microsecond, {rounded}, cast('00:00:00' as time))" + converted = f"TO_VARCHAR({time_value}, 'HH24:MI:SS.FF6')" + return converted + if coltype.rounds: timestamp = f"to_timestamp(round(date_part(epoch_nanosecond, convert_timezone('UTC', {value})::timestamp(9))/1000000000, {coltype.precision}))" else: @@ -104,7 +121,7 @@ class Snowflake(Database): _conn: Any - def 
__init__(self, *, schema: str, key: Optional[str] = None, key_content: Optional[str] = None, **kw): + def __init__(self, *, schema: str, key: Optional[str] = None, key_content: Optional[str] = None, **kw) -> None: super().__init__() snowflake, serialization, default_backend = import_snowflake() logging.getLogger("snowflake.connector").setLevel(logging.WARNING) @@ -164,7 +181,8 @@ def select_table_schema(self, path: DbPath) -> str: info_schema_path.insert(0, database) return ( - "SELECT column_name, data_type, datetime_precision, numeric_precision, numeric_scale " + "SELECT column_name, data_type, datetime_precision, numeric_precision, numeric_scale" + " , coalesce(collation_name, 'utf8') " f"FROM {'.'.join(info_schema_path)} " f"WHERE table_name = '{name}' AND table_schema = '{schema}'" ) diff --git a/data_diff/databases/trino.py b/data_diff/databases/trino.py index b76ba74bb..5a432ee52 100644 --- a/data_diff/databases/trino.py +++ b/data_diff/databases/trino.py @@ -40,7 +40,7 @@ class Trino(presto.Presto): _conn: Any - def __init__(self, **kw): + def __init__(self, **kw) -> None: super().__init__() trino = import_trino() diff --git a/data_diff/databases/vertica.py b/data_diff/databases/vertica.py index 23f63accd..9b1de1f4d 100644 --- a/data_diff/databases/vertica.py +++ b/data_diff/databases/vertica.py @@ -2,6 +2,7 @@ import attrs +from data_diff.schema import RawColumnInfo from data_diff.utils import match_regexps from data_diff.databases.base import ( CHECKSUM_HEXDIGITS, @@ -56,7 +57,10 @@ class Dialect(BaseDialect): "boolean": Boolean, } - def quote(self, s: str): + # https://www.vertica.com/docs/9.3.x/HTML/Content/Authoring/SQLReferenceManual/DataTypes/Numeric/NUMERIC.htm#Default + DEFAULT_NUMERIC_PRECISION = 15 + + def quote(self, s: str) -> str: return f'"{s}"' def concat(self, items: List[str]) -> str: @@ -68,27 +72,19 @@ def to_string(self, s: str) -> str: def is_distinct_from(self, a: str, b: str) -> str: return f"not ({a} <=> {b})" - def parse_type( - self, - table_path: DbPath, - col_name: str, - type_repr: str, - datetime_precision: int = None, - numeric_precision: int = None, - numeric_scale: int = None, - ) -> ColType: + def parse_type(self, table_path: DbPath, info: RawColumnInfo) -> ColType: timestamp_regexps = { r"timestamp\(?(\d?)\)?": Timestamp, r"timestamptz\(?(\d?)\)?": TimestampTZ, } - for m, t_cls in match_regexps(timestamp_regexps, type_repr): + for m, t_cls in match_regexps(timestamp_regexps, info.data_type): precision = int(m.group(1)) if m.group(1) else 6 return t_cls(precision=precision, rounds=self.ROUNDS_ON_PREC_LOSS) number_regexps = { r"numeric\((\d+),(\d+)\)": Decimal, } - for m, n_cls in match_regexps(number_regexps, type_repr): + for m, n_cls in match_regexps(number_regexps, info.data_type): _prec, scale = map(int, m.groups()) return n_cls(scale) @@ -96,10 +92,10 @@ def parse_type( r"varchar\((\d+)\)": Text, r"char\((\d+)\)": Text, } - for m, n_cls in match_regexps(string_regexps, type_repr): + for m, n_cls in match_regexps(string_regexps, info.data_type): return n_cls() - return super().parse_type(table_path, col_name, type_repr, datetime_precision, numeric_precision) + return super().parse_type(table_path, info) def set_timezone_to_utc(self) -> str: return "SET TIME ZONE TO 'UTC'" @@ -141,7 +137,7 @@ class Vertica(ThreadedDatabase): _args: Dict[str, Any] - def __init__(self, *, thread_count, **kw): + def __init__(self, *, thread_count, **kw) -> None: super().__init__(thread_count=thread_count) self._args = kw self._args["AUTOCOMMIT"] = False @@ -150,8 
+146,7 @@ def __init__(self, *, thread_count, **kw): def create_connection(self): vertica = import_vertica() try: - c = vertica.connect(**self._args) - return c + return vertica.connect(**self._args) except vertica.errors.ConnectionError as e: raise ConnectError(*e.args) from e diff --git a/data_diff/dbt.py b/data_diff/dbt.py index 92a1fdbe2..6e961a219 100644 --- a/data_diff/dbt.py +++ b/data_diff/dbt.py @@ -303,15 +303,26 @@ def _local_diff( columns_removed = table1_column_names.difference(table2_column_names) # col type is i = 1 in tuple columns_type_changed = { - k for k, v in table2_columns.items() if k in table1_columns and v[1] != table1_columns[k][1] + k for k, v in table2_columns.items() if k in table1_columns and v.data_type != table1_columns[k].data_type } - if columns_added: - diff_output_str += columns_added_template(columns_added) + diff_output_str += f"Primary Keys: {diff_vars.primary_keys} \n" + + if diff_vars.where_filter: + diff_output_str += f"Where Filter: '{str(diff_vars.where_filter)}' \n" + + if diff_vars.include_columns: + diff_output_str += f"Included Columns: {diff_vars.include_columns} \n" + + if diff_vars.exclude_columns: + diff_output_str += f"Excluded Columns: {diff_vars.exclude_columns} \n" if columns_removed: diff_output_str += columns_removed_template(columns_removed) + if columns_added: + diff_output_str += columns_added_template(columns_added) + if columns_type_changed: diff_output_str += columns_type_changed_template(columns_type_changed) column_set = column_set.difference(columns_type_changed) @@ -349,13 +360,14 @@ def _local_diff( return dataset1_columns = [ - (name, type_, table1.database.dialect.parse_type(table1.table_path, name, type_, *other)) - for (name, type_, *other) in table1_columns.values() + (info.column_name, info.data_type, table1.database.dialect.parse_type(table1.table_path, info)) + for info in table1_columns.values() ] dataset2_columns = [ - (name, type_, table2.database.dialect.parse_type(table2.table_path, name, type_, *other)) - for (name, type_, *other) in table2_columns.values() + (info.column_name, info.data_type, table2.database.dialect.parse_type(table2.table_path, info)) + for info in table2_columns.values() ] + print( json.dumps( jsonify( @@ -455,32 +467,57 @@ def _cloud_diff( rows_removed_count = diff_results.pks.exclusives[0] rows_updated = diff_results.values.rows_with_differences - total_rows = diff_results.values.total_rows - rows_unchanged = int(total_rows) - int(rows_updated) + total_rows_table1 = diff_results.pks.total_rows[0] + total_rows_table2 = diff_results.pks.total_rows[1] + total_rows_diff = total_rows_table2 - total_rows_table1 + + rows_unchanged = int(total_rows_table1) - int(rows_updated) - int(rows_removed_count) diff_percent_list = { - x.column_name: str(x.match) + "%" for x in diff_results.values.columns_diff_stats if x.match != 100.0 + x.column_name: f"{str(round(100.00 - x.match, 2))}%" + for x in diff_results.values.columns_diff_stats + if x.match != 100.0 } - columns_added = diff_results.schema_.exclusive_columns[1] - columns_removed = diff_results.schema_.exclusive_columns[0] + columns_added = set(diff_results.schema_.exclusive_columns[1]) + columns_removed = set(diff_results.schema_.exclusive_columns[0]) column_type_changes = diff_results.schema_.column_type_differs - if columns_added: - diff_output_str += columns_added_template(columns_added) + diff_output_str += f"Primary Keys: {diff_vars.primary_keys} \n" + if diff_vars.where_filter: + diff_output_str += f"Where Filter: 
'{str(diff_vars.where_filter)}' \n" + + if diff_vars.include_columns: + diff_output_str += f"Included Columns: {diff_vars.include_columns} \n" + + if diff_vars.exclude_columns: + diff_output_str += f"Excluded Columns: {diff_vars.exclude_columns} \n" if columns_removed: diff_output_str += columns_removed_template(columns_removed) + if columns_added: + diff_output_str += columns_added_template(columns_added) + if column_type_changes: diff_output_str += columns_type_changed_template(column_type_changes) + deps_impacts = { + key: len(value) + sum(len(item.get("BiHtSync", [])) for item in value) if key == "hightouch" else len(value) + for key, value in diff_results.deps.deps.items() + } + if any([rows_added_count, rows_removed_count, rows_updated]): diff_output = dbt_diff_string_template( - rows_added_count, - rows_removed_count, - rows_updated, - str(rows_unchanged), - diff_percent_list, - "Value Match Percent:", + total_rows_table1=total_rows_table1, + total_rows_table2=total_rows_table2, + total_rows_diff=total_rows_diff, + rows_added=rows_added_count, + rows_removed=rows_removed_count, + rows_updated=rows_updated, + rows_unchanged=str(rows_unchanged), + deps_impacts=deps_impacts, + is_cloud=True, + extra_info_dict=diff_percent_list, + extra_info_str="Value Changed:", ) diff_output_str += f"\n{diff_url}\n {diff_output} \n" rich.print(diff_output_str) @@ -524,7 +561,7 @@ def _cloud_diff( def _diff_output_base(dev_path: str, prod_path: str) -> str: - return f"\n[green]{prod_path} <> {dev_path}[/] \n" + return f"\n[blue]{prod_path}[/] <> [green]{dev_path}[/] \n" def _initialize_events(dbt_user_id: Optional[str], dbt_version: Optional[str], dbt_project_id: Optional[str]) -> None: diff --git a/data_diff/dbt_parser.py b/data_diff/dbt_parser.py index 0d864a573..eda5f6c57 100644 --- a/data_diff/dbt_parser.py +++ b/data_diff/dbt_parser.py @@ -50,7 +50,7 @@ def try_get_dbt_runner(): # ProfileRenderer.render_data() fails without instantiating global flag MACRO_DEBUGGING in dbt-core 1.5 # hacky but seems to be a bug on dbt's end -def try_set_dbt_flags(): +def try_set_dbt_flags() -> None: try: from dbt.flags import set_flags diff --git a/data_diff/diff_tables.py b/data_diff/diff_tables.py index 668024264..31a01a075 100644 --- a/data_diff/diff_tables.py +++ b/data_diff/diff_tables.py @@ -1,16 +1,17 @@ """Provides classes for performing a table diff """ - +import threading import time from abc import ABC, abstractmethod from enum import Enum from contextlib import contextmanager from operator import methodcaller -from typing import Dict, Tuple, Iterator, Optional +from typing import Any, Dict, Set, List, Tuple, Iterator, Optional, Union from concurrent.futures import ThreadPoolExecutor, as_completed import attrs +from data_diff.errors import DataDiffMismatchingKeyTypesError from data_diff.info_tree import InfoTree, SegmentInfo from data_diff.utils import dbt_diff_string_template, run_as_daemon, safezip, getLogger, truncate_error, Vector from data_diff.thread_utils import ThreadedYielder @@ -28,6 +29,7 @@ class Algorithm(Enum): DiffResult = Iterator[Tuple[str, tuple]] # Iterator[Tuple[Literal["+", "-"], tuple]] +DiffResultList = Iterator[List[Tuple[str, tuple]]] @attrs.define(frozen=False) @@ -88,7 +90,7 @@ class DiffResultWrapper: stats: dict result_list: list = attrs.field(factory=list) - def __iter__(self): + def __iter__(self) -> Iterator[Any]: yield from self.result_list for i in self.diff: self.result_list.append(i) @@ -136,14 +138,19 @@ def _get_stats(self, is_dbt: bool = False) -> DiffStats: def 
get_stats_string(self, is_dbt: bool = False): diff_stats = self._get_stats(is_dbt) + total_rows_diff = diff_stats.table2_count - diff_stats.table1_count + if is_dbt: string_output = dbt_diff_string_template( - diff_stats.diff_by_sign["+"], - diff_stats.diff_by_sign["-"], - diff_stats.diff_by_sign["!"], - diff_stats.unchanged, - diff_stats.extra_column_diffs, - "Values Updated:", + total_rows_table1=diff_stats.table1_count, + total_rows_table2=diff_stats.table2_count, + total_rows_diff=total_rows_diff, + rows_added=diff_stats.diff_by_sign["+"], + rows_removed=diff_stats.diff_by_sign["-"], + rows_updated=diff_stats.diff_by_sign["!"], + rows_unchanged=diff_stats.unchanged, + extra_info_dict=diff_stats.extra_column_diffs, + extra_info_str="[u]Values Changed[/u]", ) else: @@ -181,9 +188,16 @@ def get_stats_dict(self, is_dbt: bool = False): @attrs.define(frozen=False) class TableDiffer(ThreadBase, ABC): + INFO_TREE_CLASS = InfoTree + bisection_factor = 32 stats: dict = {} + ignored_columns1: Set[str] = attrs.field(factory=set) + ignored_columns2: Set[str] = attrs.field(factory=set) + _ignored_columns_lock: threading.Lock = attrs.field(factory=threading.Lock, init=False) + yield_list: bool = False + def diff_tables(self, table1: TableSegment, table2: TableSegment, info_tree: InfoTree = None) -> DiffResultWrapper: """Diff the given tables. @@ -198,12 +212,15 @@ def diff_tables(self, table1: TableSegment, table2: TableSegment, info_tree: Inf Where `row` is a tuple of values, corresponding to the diffed columns. """ if info_tree is None: - info_tree = InfoTree(SegmentInfo([table1, table2])) + segment_info = self.INFO_TREE_CLASS.SEGMENT_INFO_CLASS([table1, table2]) + info_tree = self.INFO_TREE_CLASS(segment_info) return DiffResultWrapper(self._diff_tables_wrapper(table1, table2, info_tree), info_tree, self.stats) def _diff_tables_wrapper(self, table1: TableSegment, table2: TableSegment, info_tree: InfoTree) -> DiffResult: if is_tracking_enabled(): options = attrs.asdict(self, recurse=False) + # not a useful event attribute + options.pop("_ignored_columns_lock") options["differ_name"] = type(self).__name__ event_json = create_start_event_json(options) run_as_daemon(send_event_json, event_json) @@ -251,7 +268,9 @@ def _diff_tables_wrapper(self, table1: TableSegment, table2: TableSegment, info_ def _validate_and_adjust_columns(self, table1: TableSegment, table2: TableSegment) -> None: pass - def _diff_tables_root(self, table1: TableSegment, table2: TableSegment, info_tree: InfoTree) -> DiffResult: + def _diff_tables_root( + self, table1: TableSegment, table2: TableSegment, info_tree: InfoTree + ) -> Union[DiffResult, DiffResultList]: return self._bisect_and_diff_tables(table1, table2, info_tree) @abstractmethod @@ -279,9 +298,13 @@ def _bisect_and_diff_tables(self, table1: TableSegment, table2: TableSegment, in if not isinstance(kt, IKey): raise NotImplementedError(f"Cannot use a column of type {kt} as a key") - for kt1, kt2 in safezip(key_types1, key_types2): + for i, (kt1, kt2) in enumerate(safezip(key_types1, key_types2)): if kt1.python_type is not kt2.python_type: - raise TypeError(f"Incompatible key types: {kt1} and {kt2}") + k1 = table1.key_columns[i] + k2 = table2.key_columns[i] + raise DataDiffMismatchingKeyTypesError( + f"Key columns {k1} and {k2} can't be compared due to different types." 
+ ) # Query min/max values key_ranges = self._threaded_call_as_completed("query_key_range", [table1, table2]) @@ -289,16 +312,17 @@ def _bisect_and_diff_tables(self, table1: TableSegment, table2: TableSegment, in # Start with the first completed value, so we don't waste time waiting min_key1, max_key1 = self._parse_key_range_result(key_types1, next(key_ranges)) - btable1, btable2 = [t.new_key_bounds(min_key=min_key1, max_key=max_key1) for t in (table1, table2)] + btable1 = table1.new_key_bounds(min_key=min_key1, max_key=max_key1, key_types=key_types1) + btable2 = table2.new_key_bounds(min_key=min_key1, max_key=max_key1, key_types=key_types2) logger.info( f"Diffing segments at key-range: {btable1.min_key}..{btable2.max_key}. " f"size: table1 <= {btable1.approximate_size()}, table2 <= {btable2.approximate_size()}" ) - ti = ThreadedYielder(self.max_threadpool_size) + ti = ThreadedYielder(self.max_threadpool_size, self.yield_list) # Bisect (split) the table into segments, and diff them recursively. - ti.submit(self._bisect_and_diff_segments, ti, btable1, btable2, info_tree) + ti.submit(self._bisect_and_diff_segments, ti, btable1, btable2, info_tree, priority=999) # Now we check for the second min-max, to diff the portions we "missed". # This is achieved by subtracting the table ranges, and dividing the resulting space into aligned boxes. @@ -313,7 +337,8 @@ def _bisect_and_diff_tables(self, table1: TableSegment, table2: TableSegment, in # β””β”€β”€β”΄β”€β”€β”€β”€β”€β”€β”΄β”€β”€β”˜ # Overall, the max number of new regions in this 2nd pass is 3^|k| - 1 - min_key2, max_key2 = self._parse_key_range_result(key_types1, next(key_ranges)) + # Note: python types can be the same, but the rendering parameters (e.g. casing) can differ. + min_key2, max_key2 = self._parse_key_range_result(key_types2, next(key_ranges)) points = [list(sorted(p)) for p in safezip(min_key1, min_key2, max_key1, max_key2)] box_mesh = create_mesh_from_points(*points) @@ -321,8 +346,9 @@ def _bisect_and_diff_tables(self, table1: TableSegment, table2: TableSegment, in new_regions = [(p1, p2) for p1, p2 in box_mesh if p1 < p2 and not (p1 >= min_key1 and p2 <= max_key1)] for p1, p2 in new_regions: - extra_tables = [t.new_key_bounds(min_key=p1, max_key=p2) for t in (table1, table2)] - ti.submit(self._bisect_and_diff_segments, ti, *extra_tables, info_tree) + extra_table1 = table1.new_key_bounds(min_key=p1, max_key=p2, key_types=key_types1) + extra_table2 = table2.new_key_bounds(min_key=p1, max_key=p2, key_types=key_types2) + ti.submit(self._bisect_and_diff_segments, ti, extra_table1, extra_table2, info_tree, priority=999) return ti @@ -353,6 +379,11 @@ def _bisect_and_diff_segments( biggest_table = max(table1, table2, key=methodcaller("approximate_size")) checkpoints = biggest_table.choose_checkpoints(self.bisection_factor - 1) + # Get it thread-safe, to avoid segment misalignment because of bad timing. 
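# A one-dimensional sketch of the "second pass" above: after bisecting the key range reported by
# the first table to answer, the leftover key space reported by the other table is cut into extra
# regions and submitted as additional segments. The real code uses create_mesh_from_points to
# handle compound (multi-column) keys; this sketch covers a single key column only.
def extra_regions_1d(min_key1, max_key1, min_key2, max_key2):
    points = sorted({min_key1, max_key1, min_key2, max_key2})
    boxes = list(zip(points, points[1:]))
    # Keep only the boxes that fall outside the range already being diffed.
    return [(p1, p2) for p1, p2 in boxes if p1 < p2 and not (p1 >= min_key1 and p2 <= max_key1)]

# Table 2 has keys both below and above table 1's range:
assert extra_regions_1d(100, 200, 50, 250) == [(50, 100), (200, 250)]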
+ with self._ignored_columns_lock: + table1 = attrs.evolve(table1, ignored_columns=frozenset(self.ignored_columns1)) + table2 = attrs.evolve(table2, ignored_columns=frozenset(self.ignored_columns2)) + # Create new instances of TableSegment between each checkpoint segmented1 = table1.segment_by_checkpoints(checkpoints) segmented2 = table2.segment_by_checkpoints(checkpoints) @@ -363,3 +394,24 @@ def _bisect_and_diff_segments( ti.submit( self._diff_segments, ti, t1, t2, info_node, max_rows, level + 1, i + 1, len(segmented1), priority=level ) + + def ignore_column(self, column_name1: str, column_name2: str) -> None: + """ + Ignore the column (by name on sides A & B) in md5s & diffs from now on. + + This affects 2 places: + + - The columns are not checksummed for new(!) segments. + - The columns are ignored in in-memory diffing for running segments. + + The columns are never ignored in the fetched values, whether they are + the same or different, for data consistency. + + Use this feature to collect relatively well-represented differences + across all columns if one of them is highly different in the beginning + of a table (as per the order of segmentation/bisection). Otherwise, + that one column might easily hit the limit and stop the whole diff. + """ + with self._ignored_columns_lock: + self.ignored_columns1.add(column_name1) + self.ignored_columns2.add(column_name2) diff --git a/data_diff/errors.py b/data_diff/errors.py index 3b446b327..b4cb92b8a 100644 --- a/data_diff/errors.py +++ b/data_diff/errors.py @@ -68,3 +68,7 @@ class DataDiffCloudDiffTimedOut(Exception): class DataDiffSimpleSelectNotFound(Exception): "Raised when using --select on dbt < 1.5 and a model node is not found in the manifest." + + +class DataDiffMismatchingKeyTypesError(Exception): + "Raised when the key types of two tables do not match, like VARCHAR and INT." diff --git a/data_diff/hashdiff_tables.py b/data_diff/hashdiff_tables.py index 1b2ec84fb..dd0c36647 100644 --- a/data_diff/hashdiff_tables.py +++ b/data_diff/hashdiff_tables.py @@ -2,9 +2,10 @@ from numbers import Number import logging from collections import defaultdict -from typing import Iterator +from typing import Any, Collection, Dict, Iterator, List, Sequence, Set, Tuple import attrs +from typing_extensions import Literal from data_diff.abcs.database_types import ColType_UUID, NumericType, PrecisionType, StringType, Boolean, JSON from data_diff.info_tree import InfoTree @@ -20,25 +21,57 @@ logger = logging.getLogger("hashdiff_tables") - -def diff_sets(a: list, b: list, json_cols: dict = None) -> Iterator: - sa = set(a) - sb = set(b) - - # The first item is always the key (see TableDiffer.relevant_columns) - # TODO update when we add compound keys to hashdiff - d = defaultdict(list) +# Just for local readability: TODO: later switch to real type declarations of these. +_Op = Literal["+", "-"] +_PK = Sequence[Any] +_Row = Tuple[Any] + + +def diff_sets( + a: Sequence[_Row], + b: Sequence[_Row], + *, + json_cols: dict = None, + columns1: Sequence[str], + columns2: Sequence[str], + key_columns1: Sequence[str], + key_columns2: Sequence[str], + ignored_columns1: Collection[str], + ignored_columns2: Collection[str], +) -> Iterator: + # Group full rows by PKs on each side.
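# The lock/evolve pair above snapshots the shared "ignored columns" sets before a segment is
# handed to worker threads, so both sides of a segment see a consistent view. A rough standalone
# sketch of that pattern; the names here are illustrative, not the real TableSegment fields.
import threading
import attrs

@attrs.frozen
class SegmentStub:
    name: str
    ignored_columns: frozenset = frozenset()

class DifferStub:
    def __init__(self) -> None:
        self.ignored_columns = set()
        self._lock = threading.Lock()

    def ignore_column(self, column: str) -> None:
        with self._lock:
            self.ignored_columns.add(column)

    def snapshot(self, segment: SegmentStub) -> SegmentStub:
        # Copy the current set under the lock; later additions won't affect this segment.
        with self._lock:
            return attrs.evolve(segment, ignored_columns=frozenset(self.ignored_columns))

differ = DifferStub()
differ.ignore_column("updated_at")
print(differ.snapshot(SegmentStub(name="t1")).ignored_columns)  # frozenset({'updated_at'})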
The first items are the PK: TableSegment.relevant_columns + rows_by_pks1: Dict[_PK, List[_Row]] = defaultdict(list) + rows_by_pks2: Dict[_PK, List[_Row]] = defaultdict(list) for row in a: - if row not in sb: - d[row[0]].append(("-", row)) + pk: _PK = tuple(val for col, val in zip(key_columns1, row)) + rows_by_pks1[pk].append(row) for row in b: - if row not in sa: - d[row[0]].append(("+", row)) + pk: _PK = tuple(val for col, val in zip(key_columns2, row)) + rows_by_pks2[pk].append(row) + + # Mind that the same pk MUST go in full with all the -/+ rows all at once, for grouping. + diffs_by_pks: Dict[_PK, List[Tuple[_Op, _Row]]] = defaultdict(list) + for pk in sorted(set(rows_by_pks1) | set(rows_by_pks2)): + cutrows1: List[_Row] = [ + tuple(val for col, val in zip(columns1, row1) if col not in ignored_columns1) for row1 in rows_by_pks1[pk] + ] + cutrows2: List[_Row] = [ + tuple(val for col, val in zip(columns2, row2) if col not in ignored_columns2) for row2 in rows_by_pks2[pk] + ] + + # Either side has 0 rows: a clearly exclusive row. + # Either side has 2+ rows: duplicates on either side, yield it all regardless of values. + # Both sides == 1: non-duplicate, non-exclusive, so check for values of interest. + if len(cutrows1) != 1 or len(cutrows2) != 1 or cutrows1 != cutrows2: + for row1 in rows_by_pks1[pk]: + diffs_by_pks[pk].append(("-", row1)) + for row2 in rows_by_pks2[pk]: + diffs_by_pks[pk].append(("+", row2)) warned_diff_cols = set() - for _k, v in sorted(d.items(), key=lambda i: i[0]): + for diffs in (diffs_by_pks[pk] for pk in sorted(diffs_by_pks)): if json_cols: - parsed_match, overriden_diff_cols = diffs_are_equiv_jsons(v, json_cols) + parsed_match, overriden_diff_cols = diffs_are_equiv_jsons(diffs, json_cols) if parsed_match: to_warn = overriden_diff_cols - warned_diff_cols for w in to_warn: @@ -48,7 +81,7 @@ def diff_sets(a: list, b: list, json_cols: dict = None) -> Iterator: ) warned_diff_cols.add(w) continue - yield from v + yield from diffs @attrs.define(frozen=False) @@ -75,7 +108,7 @@ class HashDiffer(TableDiffer): stats: dict = attrs.field(factory=dict) - def __attrs_post_init__(self): + def __attrs_post_init__(self) -> None: # Validate options if self.bisection_factor >= self.bisection_threshold: raise ValueError("Incorrect param values (bisection factor must be lower than threshold)") @@ -93,8 +126,11 @@ def _validate_and_adjust_columns(self, table1: TableSegment, table2: TableSegmen col1 = table1._schema[c1] col2 = table2._schema[c2] if isinstance(col1, PrecisionType): - if strict and not isinstance(col2, PrecisionType): - raise TypeError(f"Incompatible types for column '{c1}': {col1} <-> {col2}") + if not isinstance(col2, PrecisionType): + if strict: + raise TypeError(f"Incompatible types for column '{c1}': {col1} <-> {col2}") + else: + continue lowest = min(col1, col2, key=lambda col: col.precision) @@ -105,8 +141,11 @@ def _validate_and_adjust_columns(self, table1: TableSegment, table2: TableSegmen table2._schema[c2] = attrs.evolve(col2, precision=lowest.precision, rounds=lowest.rounds) elif isinstance(col1, (NumericType, Boolean)): - if strict and not isinstance(col2, (NumericType, Boolean)): - raise TypeError(f"Incompatible types for column '{c1}': {col1} <-> {col2}") + if not isinstance(col2, (NumericType, Boolean)): + if strict: + raise TypeError(f"Incompatible types for column '{c1}': {col1} <-> {col2}") + else: + continue lowest = min(col1, col2, key=lambda col: col.precision) @@ -201,7 +240,19 @@ def _bisect_and_diff_segments( for i, colname in 
enumerate(table1.extra_columns) if isinstance(table1._schema[colname], JSON) } - diff = list(diff_sets(rows1, rows2, json_cols)) + diff = list( + diff_sets( + rows1, + rows2, + json_cols=json_cols, + columns1=table1.relevant_columns, + columns2=table2.relevant_columns, + key_columns1=table1.key_columns, + key_columns2=table2.key_columns, + ignored_columns1=self.ignored_columns1, + ignored_columns2=self.ignored_columns2, + ) + ) info_tree.info.set_diff(diff) info_tree.info.rowcounts = {1: len(rows1), 2: len(rows2)} diff --git a/data_diff/info_tree.py b/data_diff/info_tree.py index d08eb16dc..a9683c20b 100644 --- a/data_diff/info_tree.py +++ b/data_diff/info_tree.py @@ -1,6 +1,7 @@ from typing import List, Dict, Optional, Any, Tuple, Union import attrs +from typing_extensions import Self from data_diff.table_segment import TableSegment @@ -17,13 +18,15 @@ class SegmentInfo: rowcounts: Dict[int, int] = attrs.field(factory=dict) max_rows: Optional[int] = None - def set_diff(self, diff: List[Union[Tuple[Any, ...], List[Any]]], schema: Optional[Tuple[Tuple[str, type]]] = None): + def set_diff( + self, diff: List[Union[Tuple[Any, ...], List[Any]]], schema: Optional[Tuple[Tuple[str, type]]] = None + ) -> None: self.diff_schema = schema self.diff = diff self.diff_count = len(diff) self.is_diff = self.diff_count > 0 - def update_from_children(self, child_infos): + def update_from_children(self, child_infos) -> None: child_infos = list(child_infos) assert child_infos @@ -41,15 +44,18 @@ def update_from_children(self, child_infos): @attrs.define(frozen=True) class InfoTree: + SEGMENT_INFO_CLASS = SegmentInfo + info: SegmentInfo children: List["InfoTree"] = attrs.field(factory=list) - def add_node(self, table1: TableSegment, table2: TableSegment, max_rows: int = None): - node = InfoTree(SegmentInfo([table1, table2], max_rows=max_rows)) + def add_node(self, table1: TableSegment, table2: TableSegment, max_rows: Optional[int] = None) -> Self: + cls = self.__class__ + node = cls(cls.SEGMENT_INFO_CLASS([table1, table2], max_rows=max_rows)) self.children.append(node) return node - def aggregate_info(self): + def aggregate_info(self) -> None: if self.children: for c in self.children: c.aggregate_info() diff --git a/data_diff/lexicographic_space.py b/data_diff/lexicographic_space.py index b7d88e369..32fdde701 100644 --- a/data_diff/lexicographic_space.py +++ b/data_diff/lexicographic_space.py @@ -66,11 +66,11 @@ class LexicographicSpace: All elements must be of the same length as the number of dimensions. (no rpadding) """ - def __init__(self, dims: Vector): + def __init__(self, dims: Vector) -> None: super().__init__() self.dims = dims - def __contains__(self, v: Vector): + def __contains__(self, v: Vector) -> bool: return all(0 <= i < d for i, d in safezip(v, self.dims)) def add(self, v1: Vector, v2: Vector) -> Vector: @@ -124,7 +124,7 @@ class BoundedLexicographicSpace: i.e. a space restricted by a "bounding-box" between two arbitrary points.
""" - def __init__(self, min_bound: Vector, max_bound: Vector): + def __init__(self, min_bound: Vector, max_bound: Vector) -> None: super().__init__() dims = tuple(mx - mn for mn, mx in safezip(min_bound, max_bound)) @@ -138,7 +138,7 @@ def __init__(self, min_bound: Vector, max_bound: Vector): self.uspace = LexicographicSpace(dims) - def __contains__(self, p: Vector): + def __contains__(self, p: Vector) -> bool: return all(mn <= i < mx for i, mn, mx in safezip(p, self.min_bound, self.max_bound)) def to_uspace(self, v: Vector) -> Vector: diff --git a/data_diff/parse_time.py b/data_diff/parse_time.py index 39924798e..ec80ccb44 100644 --- a/data_diff/parse_time.py +++ b/data_diff/parse_time.py @@ -33,7 +33,7 @@ class ParseError(ValueError): UNITS_STR = ", ".join(sorted(TIME_UNITS.keys())) -def string_similarity(a, b): +def string_similarity(a, b) -> SequenceMatcher: return SequenceMatcher(None, a, b).ratio() @@ -53,7 +53,7 @@ def parse_time_atom(count, unit): return count, unit -def parse_time_delta(t: str): +def parse_time_delta(t: str) -> timedelta: time_dict = {} while t: m = TIME_RE.match(t) @@ -70,5 +70,5 @@ def parse_time_delta(t: str): return timedelta(**time_dict) -def parse_time_before(time: datetime, delta: str): +def parse_time_before(time: datetime, delta: str) -> datetime: return time - parse_time_delta(delta) diff --git a/data_diff/queries/api.py b/data_diff/queries/api.py index 82786871a..0ff787190 100644 --- a/data_diff/queries/api.py +++ b/data_diff/queries/api.py @@ -26,7 +26,7 @@ def join(*tables: ITable) -> Join: return Join(tables) -def leftjoin(*tables: ITable): +def leftjoin(*tables: ITable) -> Join: """Left-joins a sequence of table expressions. See Also: ``join()`` @@ -34,7 +34,7 @@ def leftjoin(*tables: ITable): return Join(tables, "LEFT") -def rightjoin(*tables: ITable): +def rightjoin(*tables: ITable) -> Join: """Right-joins a sequence of table expressions. See Also: ``join()`` @@ -42,7 +42,7 @@ def rightjoin(*tables: ITable): return Join(tables, "RIGHT") -def outerjoin(*tables: ITable): +def outerjoin(*tables: ITable) -> Join: """Outer-joins a sequence of table expressions. 
See Also: ``join()`` @@ -50,7 +50,7 @@ def outerjoin(*tables: ITable): return Join(tables, "FULL OUTER") -def cte(expr: Expr, *, name: Optional[str] = None, params: Sequence[str] = None): +def cte(expr: Expr, *, name: Optional[str] = None, params: Sequence[str] = None) -> Cte: """Define a CTE""" return Cte(expr, name, params) @@ -72,7 +72,7 @@ def table(*path: str, schema: Union[dict, CaseAwareMapping] = None) -> TablePath return TablePath(path, schema) -def or_(*exprs: Expr): +def or_(*exprs: Expr) -> Union[BinBoolOp, Expr]: """Apply OR between a sequence of boolean expressions""" exprs = args_as_tuple(exprs) if len(exprs) == 1: @@ -80,7 +80,7 @@ def or_(*exprs: Expr): return BinBoolOp("OR", exprs) -def and_(*exprs: Expr): +def and_(*exprs: Expr) -> Union[BinBoolOp, Expr]: """Apply AND between a sequence of boolean expressions""" exprs = args_as_tuple(exprs) if len(exprs) == 1: @@ -88,32 +88,32 @@ def and_(*exprs: Expr): return BinBoolOp("AND", exprs) -def sum_(expr: Expr): +def sum_(expr: Expr) -> Func: """Call SUM(expr)""" return Func("sum", [expr]) -def avg(expr: Expr): +def avg(expr: Expr) -> Func: """Call AVG(expr)""" return Func("avg", [expr]) -def min_(expr: Expr): +def min_(expr: Expr) -> Func: """Call MIN(expr)""" return Func("min", [expr]) -def max_(expr: Expr): +def max_(expr: Expr) -> Func: """Call MAX(expr)""" return Func("max", [expr]) -def exists(expr: Expr): +def exists(expr: Expr) -> Func: """Call EXISTS(expr)""" return Func("exists", [expr]) -def if_(cond: Expr, then: Expr, else_: Optional[Expr] = None): +def if_(cond: Expr, then: Expr, else_: Optional[Expr] = None) -> CaseWhen: """Conditional expression, shortcut to when-then-else. Example: @@ -125,7 +125,7 @@ def if_(cond: Expr, then: Expr, else_: Optional[Expr] = None): return when(cond).then(then).else_(else_) -def when(*when_exprs: Expr): +def when(*when_exprs: Expr) -> QB_When: """Start a when-then expression Example: @@ -145,13 +145,13 @@ def when(*when_exprs: Expr): return CaseWhen([]).when(*when_exprs) -def coalesce(*exprs): +def coalesce(*exprs) -> Func: "Returns a call to COALESCE" exprs = args_as_tuple(exprs) return Func("COALESCE", exprs) -def insert_rows_in_batches(db, tbl: TablePath, rows, *, columns=None, batch_size=1024 * 8): +def insert_rows_in_batches(db, tbl: TablePath, rows, *, columns=None, batch_size=1024 * 8) -> None: assert batch_size > 0 rows = list(rows) @@ -160,7 +160,7 @@ def insert_rows_in_batches(db, tbl: TablePath, rows, *, columns=None, batch_size db.query(tbl.insert_rows(batch, columns=columns)) -def current_timestamp(): +def current_timestamp() -> CurrentTimestamp: """Returns CURRENT_TIMESTAMP() or NOW()""" return CurrentTimestamp() diff --git a/data_diff/queries/ast_classes.py b/data_diff/queries/ast_classes.py index 3bf14fd7c..0580e8247 100644 --- a/data_diff/queries/ast_classes.py +++ b/data_diff/queries/ast_classes.py @@ -45,7 +45,7 @@ def _dfs_values(self): if isinstance(v, ExprNode): yield from v._dfs_values() - def cast_to(self, to): + def cast_to(self, to) -> "Cast": return Cast(self, to) @@ -110,7 +110,7 @@ def select(self, *exprs, distinct=SKIP, optimizer_hints=SKIP, **named_exprs) -> resolve_names(self.source_table, exprs) return Select.make(self, columns=exprs, distinct=distinct, optimizer_hints=optimizer_hints) - def where(self, *exprs): + def where(self, *exprs) -> "Select": """Filter the rows, based on the given predicates. 
(aka Selection)""" exprs = args_as_tuple(exprs) exprs = _drop_skips(exprs) @@ -120,7 +120,7 @@ def where(self, *exprs): resolve_names(self.source_table, exprs) return Select.make(self, where_exprs=exprs) - def order_by(self, *exprs): + def order_by(self, *exprs) -> "Select": """Order the rows lexicographically, according to the given expressions.""" exprs = _drop_skips(exprs) if not exprs: @@ -129,14 +129,14 @@ def order_by(self, *exprs): resolve_names(self.source_table, exprs) return Select.make(self, order_by_exprs=exprs) - def limit(self, limit: int): + def limit(self, limit: int) -> "Select": """Stop yielding rows after the given limit. i.e. take the first 'n=limit' rows""" if limit is SKIP: return self return Select.make(self, limit_expr=limit) - def join(self, target: "ITable"): + def join(self, target: "ITable") -> "Join": """Join the current table with the target table, returning a new table containing both side-by-side. When joining, it's recommended to use explicit tables names, instead of `this`, in order to avoid potential name collisions. @@ -180,7 +180,7 @@ def group_by(self, *keys) -> "GroupBy": return GroupBy(self, keys) - def _get_column(self, name: str): + def _get_column(self, name: str) -> "Column": if self.schema: name = self.schema.get_key(name) # Get the actual name. Might be case-insensitive. return Column(self, name) @@ -188,29 +188,29 @@ def _get_column(self, name: str): # def __getattr__(self, column): # return self._get_column(column) - def __getitem__(self, column): + def __getitem__(self, column) -> "Column": if not isinstance(column, str): raise TypeError() return self._get_column(column) - def count(self): + def count(self) -> "Select": """SELECT count() FROM self""" return Select(self, [Count()]) - def union(self, other: "ITable"): + def union(self, other: "ITable") -> "TableOp": """SELECT * FROM self UNION other""" return TableOp("UNION", self, other) - def union_all(self, other: "ITable"): + def union_all(self, other: "ITable") -> "TableOp": """SELECT * FROM self UNION ALL other""" return TableOp("UNION ALL", self, other) - def minus(self, other: "ITable"): + def minus(self, other: "ITable") -> "TableOp": """SELECT * FROM self EXCEPT other""" # aka return TableOp("EXCEPT", self, other) - def intersect(self, other: "ITable"): + def intersect(self, other: "ITable") -> "TableOp": """SELECT * FROM self INTERSECT other""" return TableOp("INTERSECT", self, other) @@ -233,51 +233,51 @@ def type(self) -> Optional[type]: @attrs.define(frozen=False, eq=False) class LazyOps: - def __add__(self, other): + def __add__(self, other) -> "BinOp": return BinOp("+", [self, other]) - def __sub__(self, other): + def __sub__(self, other) -> "BinOp": return BinOp("-", [self, other]) - def __neg__(self): + def __neg__(self) -> "UnaryOp": return UnaryOp("-", self) - def __gt__(self, other): + def __gt__(self, other) -> "BinBoolOp": return BinBoolOp(">", [self, other]) - def __ge__(self, other): + def __ge__(self, other) -> "BinBoolOp": return BinBoolOp(">=", [self, other]) - def __eq__(self, other): + def __eq__(self, other) -> "BinBoolOp": if other is None: return BinBoolOp("IS", [self, None]) return BinBoolOp("=", [self, other]) - def __lt__(self, other): + def __lt__(self, other) -> "BinBoolOp": return BinBoolOp("<", [self, other]) - def __le__(self, other): + def __le__(self, other) -> "BinBoolOp": return BinBoolOp("<=", [self, other]) - def __or__(self, other): + def __or__(self, other) -> "BinBoolOp": return BinBoolOp("OR", [self, other]) - def __and__(self, other): + def 
__and__(self, other) -> "BinBoolOp": return BinBoolOp("AND", [self, other]) - def is_distinct_from(self, other): + def is_distinct_from(self, other) -> "IsDistinctFrom": return IsDistinctFrom(self, other) - def like(self, other): + def like(self, other) -> "BinBoolOp": return BinBoolOp("LIKE", [self, other]) - def sum(self): + def sum(self) -> "Func": return Func("SUM", [self]) - def max(self): + def max(self) -> "Func": return Func("MAX", [self]) - def min(self): + def min(self) -> "Func": return Func("MIN", [self]) @@ -523,7 +523,7 @@ class GroupBy(ExprNode, ITable, Root): values: Optional[Sequence[Expr]] = None having_exprs: Optional[Sequence[Expr]] = None - def __attrs_post_init__(self): + def __attrs_post_init__(self) -> None: assert self.keys or self.values def having(self, *exprs) -> Self: diff --git a/data_diff/queries/base.py b/data_diff/queries/base.py index ca8953c40..7f7cd1de1 100644 --- a/data_diff/queries/base.py +++ b/data_diff/queries/base.py @@ -5,7 +5,7 @@ @attrs.define(frozen=True) class _SKIP: - def __repr__(self): + def __repr__(self) -> str: return "SKIP" diff --git a/data_diff/query_utils.py b/data_diff/query_utils.py index a48877288..ed753d31d 100644 --- a/data_diff/query_utils.py +++ b/data_diff/query_utils.py @@ -23,7 +23,7 @@ def _drop_table(name: DbPath): yield commit -def drop_table(db, tbl): +def drop_table(db, tbl) -> None: if isinstance(db, Oracle): db.query(_drop_table_oracle(tbl)) else: @@ -51,6 +51,6 @@ def _append_to_table(path: DbPath, expr: Expr): yield commit -def append_to_table(db, path, expr): +def append_to_table(db, path, expr) -> None: f = _append_to_table_oracle if isinstance(db, Oracle) else _append_to_table db.query(f(path, expr)) diff --git a/data_diff/schema.py b/data_diff/schema.py index 67b4261f1..d77e298b4 100644 --- a/data_diff/schema.py +++ b/data_diff/schema.py @@ -1,4 +1,7 @@ import logging +from typing import Any, Collection, Iterator, Optional + +import attrs from data_diff.utils import CaseAwareMapping, CaseInsensitiveDict, CaseSensitiveDict from data_diff.abcs.database_types import DbPath @@ -8,6 +11,35 @@ Schema = CaseAwareMapping +@attrs.frozen(kw_only=True) +class RawColumnInfo(Collection[Any]): + """ + A raw row representing the schema info about a column. + + Do not rely on this class too much, it will be removed soon when the schema + selecting & parsing methods are united into one overrideable method. + """ + + column_name: str + data_type: str + datetime_precision: Optional[int] = None + numeric_precision: Optional[int] = None + numeric_scale: Optional[int] = None + collation_name: Optional[str] = None + + # It was a tuple once, so we keep it backward compatible temporarily, until remade to classes. 
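# --- Editor's aside: a minimal illustrative sketch, not part of the patch. ---
# Assuming RawColumnInfo exactly as defined in the hunk above, the temporary
# tuple compatibility means existing callers can still unpack a schema row
# into the legacy 5-element shape:
from data_diff.schema import RawColumnInfo

info = RawColumnInfo(column_name="id", data_type="uuid")
name, data_type, dt_prec, num_prec, num_scale = info  # goes through __iter__
assert (name, data_type) == ("id", "uuid")
assert len(info) == 5  # __len__ is pinned to the legacy tuple length
# ------------------------------------------------------------------------------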
+ def __iter__(self) -> Iterator[Any]: + return iter( + (self.column_name, self.data_type, self.datetime_precision, self.numeric_precision, self.numeric_scale) + ) + + def __len__(self) -> int: + return 5 + + def __contains__(self, item: Any) -> bool: + return False # that was not used + + def create_schema(db_name: str, table_path: DbPath, schema: dict, case_sensitive: bool) -> CaseAwareMapping: logger.info(f"[{db_name}] Schema = {schema}") diff --git a/data_diff/table_segment.py b/data_diff/table_segment.py index d8f842313..4e712445d 100644 --- a/data_diff/table_segment.py +++ b/data_diff/table_segment.py @@ -1,5 +1,5 @@ import time -from typing import List, Optional, Tuple +from typing import Container, Dict, List, Optional, Sequence, Tuple import logging from itertools import product @@ -9,8 +9,8 @@ from data_diff.utils import safezip, Vector from data_diff.utils import ArithString, split_space from data_diff.databases.base import Database -from data_diff.abcs.database_types import DbPath, DbKey, DbTime -from data_diff.schema import Schema, create_schema +from data_diff.abcs.database_types import DbPath, DbKey, DbTime, IKey +from data_diff.schema import RawColumnInfo, Schema, create_schema from data_diff.queries.extras import Checksum from data_diff.queries.api import Count, SKIP, table, this, Expr, min_, max_, Code from data_diff.queries.extras import ApplyFuncAndNormalizeAsString, NormalizeAsString @@ -114,6 +114,7 @@ class TableSegment: key_columns: Tuple[str, ...] update_column: Optional[str] = None extra_columns: Tuple[str, ...] = () + ignored_columns: Container[str] = frozenset() # Restrict the segment min_key: Optional[Vector] = None @@ -125,7 +126,7 @@ class TableSegment: case_sensitive: Optional[bool] = True _schema: Optional[Schema] = None - def __attrs_post_init__(self): + def __attrs_post_init__(self) -> None: if not self.update_column and (self.min_update or self.max_update): raise ValueError("Error: the min_update/max_update feature requires 'update_column' to be set.") @@ -137,10 +138,10 @@ def __attrs_post_init__(self): f"Error: min_update expected to be smaller than max_update! ({self.min_update} >= {self.max_update})" ) - def _where(self): + def _where(self) -> Optional[str]: return f"({self.where})" if self.where else None - def _with_raw_schema(self, raw_schema: dict) -> Self: + def _with_raw_schema(self, raw_schema: Dict[str, RawColumnInfo]) -> Self: schema = self.database._process_table_schema(self.table_path, raw_schema, self.relevant_columns, self._where()) return self.new(schema=create_schema(self.database.name, self.table_path, schema, self.case_sensitive)) @@ -151,7 +152,7 @@ def with_schema(self) -> Self: return self._with_raw_schema(self.database.query_table_schema(self.table_path)) - def get_schema(self): + def get_schema(self) -> Dict[str, RawColumnInfo]: return self.database.query_table_schema(self.table_path) def _make_key_range(self): @@ -179,7 +180,10 @@ def make_select(self): def get_values(self) -> list: "Download all the relevant values of the segment from the database" - select = self.make_select().select(*self._relevant_columns_repr) + + # Fetch all the original columns, even if some were later excluded from checking. 
+ fetched_cols = [NormalizeAsString(this[c]) for c in self.relevant_columns] + select = self.make_select().select(*fetched_cols) return self.database.query(select, List[Tuple]) def choose_checkpoints(self, count: int) -> List[List[DbKey]]: @@ -201,7 +205,7 @@ def new(self, **kwargs) -> Self: """Creates a copy of the instance using 'replace()'""" return attrs.evolve(self, **kwargs) - def new_key_bounds(self, min_key: Vector, max_key: Vector) -> Self: + def new_key_bounds(self, min_key: Vector, max_key: Vector, *, key_types: Optional[Sequence[IKey]] = None) -> Self: if self.min_key is not None: assert self.min_key <= min_key, (self.min_key, min_key) assert self.min_key < max_key @@ -210,6 +214,13 @@ def new_key_bounds(self, min_key: Vector, max_key: Vector) -> Self: assert min_key < self.max_key assert max_key <= self.max_key + # If asked, enforce the PKs to proper types, mainly to meta-params of the relevant side, + # so that we do not leak e.g. casing of UUIDs from side A to side B and vice versa. + # If not asked, keep the meta-params of the keys as is (assume them already casted). + if key_types is not None: + min_key = Vector(type.make_value(val) for type, val in safezip(key_types, min_key)) + max_key = Vector(type.make_value(val) for type, val in safezip(key_types, max_key)) + return attrs.evolve(self, min_key=min_key, max_key=max_key) @property @@ -221,18 +232,18 @@ def relevant_columns(self) -> List[str]: return list(self.key_columns) + extras - @property - def _relevant_columns_repr(self) -> List[Expr]: - return [NormalizeAsString(this[c]) for c in self.relevant_columns] - def count(self) -> int: """Count how many rows are in the segment, in one pass.""" return self.database.query(self.make_select().select(Count()), int) def count_and_checksum(self) -> Tuple[int, int]: """Count and checksum the rows in the segment, in one pass.""" + + checked_columns = [c for c in self.relevant_columns if c not in self.ignored_columns] + cols = [NormalizeAsString(this[c]) for c in checked_columns] + start = time.monotonic() - q = self.make_select().select(Count(), Checksum(self._relevant_columns_repr)) + q = self.make_select().select(Count(), Checksum(cols)) count, checksum = self.database.query(q, tuple) duration = time.monotonic() - start if duration > RECOMMENDED_CHECKSUM_DURATION: diff --git a/data_diff/thread_utils.py b/data_diff/thread_utils.py index 55b2d9d56..2b9bb3db3 100644 --- a/data_diff/thread_utils.py +++ b/data_diff/thread_utils.py @@ -5,7 +5,7 @@ from concurrent.futures import ThreadPoolExecutor from concurrent.futures.thread import _WorkItem from time import sleep -from typing import Callable, Iterator, Optional +from typing import Any, Callable, Iterator, Optional import attrs @@ -19,7 +19,7 @@ class AutoPriorityQueue(PriorityQueue): _counter = itertools.count().__next__ - def put(self, item: Optional[_WorkItem], block=True, timeout=None): + def put(self, item: Optional[_WorkItem], block=True, timeout=None) -> None: priority = item.kwargs.pop("priority") if item is not None else 0 super().put((-priority, self._counter(), item), block, timeout) @@ -34,7 +34,7 @@ class PriorityThreadPoolExecutor(ThreadPoolExecutor): XXX WARNING: Might break in future versions of Python """ - def __init__(self, *args): + def __init__(self, *args) -> None: super().__init__(*args) self._work_queue = AutoPriorityQueue() @@ -56,26 +56,31 @@ class ThreadedYielder(Iterable): _futures: deque _yield: deque = attrs.field(alias="_yield") # Python keyword! 
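# --- Editor's aside: a minimal illustrative sketch, not part of the patch. ---
# Intent of the new `ignored_columns` field on TableSegment, as read from the
# hunks above: get_values() still downloads every relevant column, while
# count_and_checksum() filters the ignored ones out before building the
# Checksum expression. The column names below are illustrative only.
relevant_columns = ["id", "updated_at", "comment"]
ignored_columns = frozenset({"comment"})

checked_columns = [c for c in relevant_columns if c not in ignored_columns]
assert checked_columns == ["id", "updated_at"]  # only these feed the checksum
# ------------------------------------------------------------------------------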
_exception: Optional[None] + yield_list: bool - def __init__(self, max_workers: Optional[int] = None): + def __init__(self, max_workers: Optional[int] = None, yield_list: bool = False) -> None: super().__init__() self._pool = PriorityThreadPoolExecutor(max_workers) self._futures = deque() self._yield = deque() self._exception = None + self.yield_list = yield_list - def _worker(self, fn, *args, **kwargs): + def _worker(self, fn, *args, **kwargs) -> None: try: res = fn(*args, **kwargs) if res is not None: - self._yield += res + if self.yield_list: + self._yield.append(res) + else: + self._yield += res except Exception as e: self._exception = e - def submit(self, fn: Callable, *args, priority: int = 0, **kwargs): + def submit(self, fn: Callable, *args, priority: int = 0, **kwargs) -> None: self._futures.append(self._pool.submit(self._worker, fn, *args, priority=priority, **kwargs)) - def __iter__(self) -> Iterator: + def __iter__(self) -> Iterator[Any]: while True: if self._exception: raise self._exception diff --git a/data_diff/tracking.py b/data_diff/tracking.py index 42f44dbb9..2f2a33701 100644 --- a/data_diff/tracking.py +++ b/data_diff/tracking.py @@ -80,16 +80,16 @@ def bool_notify_about_extension() -> bool: entrypoint_name = "Python API" -def disable_tracking(): +def disable_tracking() -> None: global g_tracking_enabled g_tracking_enabled = False -def is_tracking_enabled(): +def is_tracking_enabled() -> bool: return g_tracking_enabled -def set_entrypoint_name(s): +def set_entrypoint_name(s) -> None: global entrypoint_name entrypoint_name = s @@ -99,22 +99,22 @@ def set_entrypoint_name(s): dbt_project_id = None -def set_dbt_user_id(s): +def set_dbt_user_id(s) -> None: global dbt_user_id dbt_user_id = s -def set_dbt_version(s): +def set_dbt_version(s) -> None: global dbt_version dbt_version = s -def set_dbt_project_id(s): +def set_dbt_project_id(s) -> None: global dbt_project_id dbt_project_id = s -def get_anonymous_id(): +def get_anonymous_id() -> str: global g_anonymous_id if g_anonymous_id is None: profile = _load_profile() @@ -201,10 +201,27 @@ def create_email_signup_event_json(email: str) -> Dict[str, Any]: } -def send_event_json(event_json): +def convert_sets_to_lists(obj): + """ + Recursively convert sets in the given object to lists. + """ + if isinstance(obj, set): + return list(obj) + elif isinstance(obj, dict): + return {k: convert_sets_to_lists(v) for k, v in obj.items()} + elif isinstance(obj, list): + return [convert_sets_to_lists(elem) for elem in obj] + else: + return obj + + +def send_event_json(event_json) -> None: if not g_tracking_enabled: raise RuntimeError("Won't send; tracking is disabled!") + # Convert sets to lists in event_json + event_json = convert_sets_to_lists(event_json) + headers = { "Content-Type": "application/json", "Authorization": "Basic MkhndE00SGNxOUJtZWlDcU5ZaHo3Tzl0a2pNOg==", diff --git a/data_diff/utils.py b/data_diff/utils.py index b9045cc13..8c8e27154 100644 --- a/data_diff/utils.py +++ b/data_diff/utils.py @@ -43,7 +43,14 @@ def safezip(*args): return zip(*args) -def is_uuid(u): +UUID_PATTERN = re.compile(r"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}", re.I) + + +def is_uuid(u: str) -> bool: + # E.g., hashlib.md5(b'hello') is a 32-letter hex number, but not an UUID. + # It would fail UUID-like comparison (< & >) because of casing and dashes. 
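# --- Editor's aside: a minimal illustrative sketch, not part of the patch. ---
# Why the UUID_PATTERN prefilter matters: uuid.UUID() accepts a bare 32-digit
# hex string (such as an MD5 digest), so without the dashed-pattern check such
# values would pass the try/except below and be treated as UUIDs.
import hashlib
from uuid import UUID

digest = hashlib.md5(b"hello").hexdigest()  # 32 hex chars, no dashes
UUID(digest)  # parses fine, even though it is not a dashed UUID string
# UUID_PATTERN.fullmatch(digest) is None, so is_uuid(digest) returns False.
# ------------------------------------------------------------------------------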
+ if not UUID_PATTERN.fullmatch(u): + return False try: UUID(u) except ValueError: @@ -73,7 +80,7 @@ def new(self, initial=()) -> Self: class CaseInsensitiveDict(CaseAwareMapping): - def __init__(self, initial): + def __init__(self, initial) -> None: super().__init__() self._dict = {k.lower(): (k, v) for k, v in dict(initial).items()} @@ -86,13 +93,13 @@ def __iter__(self) -> Iterator[V]: def __len__(self) -> int: return len(self._dict) - def __setitem__(self, key: str, value): + def __setitem__(self, key: str, value) -> None: k = key.lower() if k in self._dict: key = self._dict[k][0] self._dict[k] = key, value - def __delitem__(self, key: str): + def __delitem__(self, key: str) -> None: del self._dict[key.lower()] def get_key(self, key: str) -> str: @@ -128,23 +135,75 @@ def range(self, other: "ArithString", count: int) -> List[Self]: return [self.new(int=i) for i in checkpoints] -# @attrs.define # not as long as it inherits from UUID -class ArithUUID(UUID, ArithString): +def _any_to_uuid(v: Union[str, int, UUID, "ArithUUID"]) -> UUID: + if isinstance(v, ArithUUID): + return v.uuid + elif isinstance(v, UUID): + return v + elif isinstance(v, str): + return UUID(v) + elif isinstance(v, int): + return UUID(int=v) + else: + raise ValueError(f"Cannot convert a value to UUID: {v!r}") + + +@attrs.define(frozen=True, eq=False, order=False) +class ArithUUID(ArithString): "A UUID that supports basic arithmetic (add, sub)" - def __int__(self): - return self.int + uuid: UUID = attrs.field(converter=_any_to_uuid) + lowercase: Optional[bool] = None + uppercase: Optional[bool] = None + + def range(self, other: "ArithUUID", count: int) -> List[Self]: + assert isinstance(other, ArithUUID) + checkpoints = split_space(self.uuid.int, other.uuid.int, count) + return [attrs.evolve(self, uuid=i) for i in checkpoints] + + def __int__(self) -> int: + return self.uuid.int def __add__(self, other: int) -> Self: if isinstance(other, int): - return self.new(int=self.int + other) + return attrs.evolve(self, uuid=self.uuid.int + other) return NotImplemented - def __sub__(self, other: Union[UUID, int]): + def __sub__(self, other: Union["ArithUUID", int]): if isinstance(other, int): - return self.new(int=self.int - other) - elif isinstance(other, UUID): - return self.int - other.int + return attrs.evolve(self, uuid=self.uuid.int - other) + elif isinstance(other, ArithUUID): + return self.uuid.int - other.uuid.int + return NotImplemented + + def __eq__(self, other: object) -> bool: + if isinstance(other, ArithUUID): + return self.uuid == other.uuid + return NotImplemented + + def __ne__(self, other: object) -> bool: + if isinstance(other, ArithUUID): + return self.uuid != other.uuid + return NotImplemented + + def __gt__(self, other: object) -> bool: + if isinstance(other, ArithUUID): + return self.uuid > other.uuid + return NotImplemented + + def __lt__(self, other: object) -> bool: + if isinstance(other, ArithUUID): + return self.uuid < other.uuid + return NotImplemented + + def __ge__(self, other: object) -> bool: + if isinstance(other, ArithUUID): + return self.uuid >= other.uuid + return NotImplemented + + def __le__(self, other: object) -> bool: + if isinstance(other, ArithUUID): + return self.uuid <= other.uuid return NotImplemented @@ -182,7 +241,7 @@ class ArithAlphanumeric(ArithString): _str: str _max_len: Optional[int] = None - def __attrs_post_init__(self): + def __attrs_post_init__(self) -> None: if self._str is None: raise ValueError("Alphanum string cannot be None") if self._max_len and len(self._str) > 
self._max_len: @@ -196,16 +255,16 @@ def __attrs_post_init__(self): # def int(self): # return alphanumToNumber(self._str, alphanums) - def __str__(self): + def __str__(self) -> str: s = self._str if self._max_len: s = s.rjust(self._max_len, alphanums[0]) return s - def __len__(self): + def __len__(self) -> int: return len(self._str) - def __repr__(self): + def __repr__(self) -> str: return f'alphanum"{self._str}"' def __add__(self, other: "Union[ArithAlphanumeric, int]") -> Self: @@ -230,17 +289,17 @@ def __sub__(self, other: "Union[ArithAlphanumeric, int]") -> float: return NotImplemented - def __ge__(self, other): + def __ge__(self, other) -> bool: if not isinstance(other, type(self)): return NotImplemented return self._str >= other._str - def __lt__(self, other): + def __lt__(self, other) -> bool: if not isinstance(other, type(self)): return NotImplemented return self._str < other._str - def __eq__(self, other): + def __eq__(self, other) -> bool: if not isinstance(other, type(self)): return NotImplemented return self._str == other._str @@ -365,32 +424,32 @@ class Vector(tuple): Partial implementation: Only the needed functionality is implemented """ - def __lt__(self, other: "Vector"): + def __lt__(self, other: "Vector") -> bool: if isinstance(other, Vector): return all(a < b for a, b in safezip(self, other)) return NotImplemented - def __le__(self, other: "Vector"): + def __le__(self, other: "Vector") -> bool: if isinstance(other, Vector): return all(a <= b for a, b in safezip(self, other)) return NotImplemented - def __gt__(self, other: "Vector"): + def __gt__(self, other: "Vector") -> bool: if isinstance(other, Vector): return all(a > b for a, b in safezip(self, other)) return NotImplemented - def __ge__(self, other: "Vector"): + def __ge__(self, other: "Vector") -> bool: if isinstance(other, Vector): return all(a >= b for a, b in safezip(self, other)) return NotImplemented - def __eq__(self, other: "Vector"): + def __eq__(self, other: "Vector") -> bool: if isinstance(other, Vector): return all(a == b for a, b in safezip(self, other)) return NotImplemented - def __sub__(self, other: "Vector"): + def __sub__(self, other: "Vector") -> "Vector": if isinstance(other, Vector): return Vector((a - b) for a, b in safezip(self, other)) raise NotImplementedError() @@ -400,19 +459,59 @@ def __repr__(self) -> str: def dbt_diff_string_template( - rows_added: str, rows_removed: str, rows_updated: str, rows_unchanged: str, extra_info_dict: Dict, extra_info_str + total_rows_table1: int, + total_rows_table2: int, + total_rows_diff: int, + rows_added: int, + rows_removed: int, + rows_updated: int, + rows_unchanged: int, + extra_info_dict: Dict, + extra_info_str: str, + is_cloud: Optional[bool] = False, + deps_impacts: Optional[Dict] = None, ) -> str: - string_output = f"\n{tabulate([[rows_added, rows_removed]], headers=['Rows Added', 'Rows Removed'])}" + # main table + main_rows = [ + ["Total", total_rows_table1, "", f"{total_rows_table2} [{diff_int_dynamic_color_template(total_rows_diff)}]"], + ["Added", "", diff_int_dynamic_color_template(rows_added), ""], + ["Removed", "", diff_int_dynamic_color_template(-rows_removed), ""], + ["Different", "", rows_updated, ""], + ["Unchanged", "", rows_unchanged, ""], + ] + + main_headers = ["rows", "PROD", "<>", "DEV"] + main_table = tabulate(main_rows, headers=main_headers) + + # diffs table + diffs_rows = sorted(list(extra_info_dict.items())) + + diffs_headers = ["columns", "% diff values" if is_cloud else "# diff values"] + diffs_table = tabulate(diffs_rows, 
headers=diffs_headers) + + # deps impacts table + deps_impacts_table = "" + if deps_impacts: + deps_impacts_rows = list(deps_impacts.items()) + deps_impacts_headers = ["deps", "# data assets"] + deps_impacts_table = f"\n\n{tabulate(deps_impacts_rows, headers=deps_impacts_headers)}" + + # combine all tables + string_output = f"\n{main_table}\n\n{diffs_table}{deps_impacts_table}" - string_output += f"\n\nUpdated Rows: {rows_updated}\n" - string_output += f"Unchanged Rows: {rows_unchanged}\n\n" + return string_output - string_output += extra_info_str - for k, v in extra_info_dict.items(): - string_output += f"\n{k}: {v}" +def diff_int_dynamic_color_template(diff_value: int) -> str: + if not isinstance(diff_value, int): + return diff_value - return string_output + if diff_value > 0: + return f"[green]+{diff_value}[/]" + elif diff_value < 0: + return f"[red]{diff_value}[/]" + else: + return "0" def _jsons_equiv(a: str, b: str): @@ -439,18 +538,18 @@ def diffs_are_equiv_jsons(diff: list, json_cols: dict): return match, overriden_diff_cols -def columns_removed_template(columns_removed) -> str: - columns_removed_str = f"Column(s) removed: {columns_removed}\n" +def columns_removed_template(columns_removed: set) -> str: + columns_removed_str = f"[red]Columns removed [-{len(columns_removed)}]:[/] [blue]{columns_removed}[/]\n" return columns_removed_str -def columns_added_template(columns_added) -> str: - columns_added_str = f"Column(s) added: {columns_added}\n" +def columns_added_template(columns_added: set) -> str: + columns_added_str = f"[green]Columns added [+{len(columns_added)}]: {columns_added}[/]\n" return columns_added_str def columns_type_changed_template(columns_type_changed) -> str: - columns_type_changed_str = f"Type change: {columns_type_changed}\n" + columns_type_changed_str = f"Type changed [{len(columns_type_changed)}]: [green]{columns_type_changed}[/]\n" return columns_type_changed_str @@ -481,7 +580,7 @@ class LogStatusHandler(logging.Handler): This log handler can be used to update a rich.status every time a log is emitted. 
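# --- Editor's aside: a minimal illustrative sketch, not part of the patch. ---
# The reworked dbt summary uses rich-style markup for the "<>" column:
# positive diffs render green with a leading "+", negative diffs render red,
# and zero stays plain, per diff_int_dynamic_color_template above.
from data_diff.utils import diff_int_dynamic_color_template

assert diff_int_dynamic_color_template(5) == "[green]+5[/]"
assert diff_int_dynamic_color_template(-3) == "[red]-3[/]"
assert diff_int_dynamic_color_template(0) == "0"
# ------------------------------------------------------------------------------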
""" - def __init__(self): + def __init__(self) -> None: super().__init__() self.status = Status("") self.prefix = "" @@ -516,12 +615,12 @@ class UnknownMeta(type): def __instancecheck__(self, instance): return instance is Unknown - def __repr__(self): + def __repr__(self) -> str: return "Unknown" class Unknown(metaclass=UnknownMeta): - def __nonzero__(self): + def __bool__(self) -> bool: raise TypeError() def __new__(class_, *args, **kwargs): diff --git a/data_diff/version.py b/data_diff/version.py index 1f4c4d43b..ae6db5f17 100644 --- a/data_diff/version.py +++ b/data_diff/version.py @@ -1 +1 @@ -__version__ = "0.10.1" +__version__ = "0.11.0" diff --git a/poetry.lock b/poetry.lock index e07c24027..f4b6a6ccb 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1956,13 +1956,13 @@ jeepney = ">=0.6" [[package]] name = "setuptools" -version = "69.0.2" +version = "69.0.3" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.0.2-py3-none-any.whl", hash = "sha256:1e8fdff6797d3865f37397be788a4e3cba233608e9b509382a2777d25ebde7f2"}, - {file = "setuptools-69.0.2.tar.gz", hash = "sha256:735896e78a4742605974de002ac60562d286fa8051a7e2299445e8e8fbb01aa6"}, + {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, + {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, ] [package.extras] @@ -2274,6 +2274,7 @@ docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.link testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] [extras] +all-dbs = ["clickhouse-driver", "cryptography", "duckdb", "mysql-connector-python", "oracledb", "preql", "presto-python-client", "psycopg2", "pyodbc", "snowflake-connector-python", "trino", "vertica-python"] clickhouse = ["clickhouse-driver"] duckdb = ["duckdb"] mssql = ["pyodbc"] @@ -2290,4 +2291,4 @@ vertica = ["vertica-python"] [metadata] lock-version = "2.0" python-versions = "^3.8.0" -content-hash = "54f5248145c8920e27de3e781bb01461b3f68bc6335d75d8793b19dffd5f4163" +content-hash = "3ba4fe6135b2011602f98b947cbee7c33cc7ac6ec8d80bf620ec5b381d4706f3" diff --git a/pyproject.toml b/pyproject.toml index 0fe15b4c2..13663d8d1 100755 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "data-diff" -version = "0.10.1" +version = "0.11.0" description = "Command-line tool and Python library to efficiently diff rows across two different databases." 
authors = ["Datafold "] license = "MIT" @@ -80,6 +80,10 @@ trino = ["trino"] clickhouse = ["clickhouse-driver"] vertica = ["vertica-python"] duckdb = ["duckdb"] +all-dbs = [ + "preql", "mysql-connector-python", "psycopg2", "snowflake-connector-python", "cryptography", "presto-python-client", + "oracledb", "pyodbc", "trino", "clickhouse-driver", "vertica-python", "duckdb" +] [tool.poetry.group.dev.dependencies] pre-commit = "^3.5.0" diff --git a/tests/cloud/test_data_source.py b/tests/cloud/test_data_source.py index cc524a325..1e4a81297 100644 --- a/tests/cloud/test_data_source.py +++ b/tests/cloud/test_data_source.py @@ -22,7 +22,7 @@ _test_data_source, ) from data_diff.dbt_parser import TDatadiffConfig - +from tests.common import ansi_stdout_cleanup DATA_SOURCE_CONFIGS = { "snowflake": TDsConfig( @@ -262,7 +262,7 @@ def test_create_ds_snowflake_config_from_dbt_profiles_one_param_passed_through_i ) self.assertEqual(actual_config, config) self.assertEqual( - mock_stdout.getvalue().strip(), + ansi_stdout_cleanup(mock_stdout.getvalue().strip()), 'Cannot extract "account" from dbt profiles.yml. Please, type it manually', ) @@ -294,7 +294,7 @@ def test_create_ds_config_validate_required_parameter(self, mock_stdout): data_source_name=config.name, ) self.assertEqual(actual_config, config) - self.assertEqual(mock_stdout.getvalue().strip(), "Parameter must not be empty") + self.assertEqual(ansi_stdout_cleanup(mock_stdout.getvalue().strip()), "Parameter must not be empty") def test_check_data_source_exists(self): self.assertEqual(_check_data_source_exists(self.data_sources, self.data_sources[0].name), self.data_sources[0]) diff --git a/tests/common.py b/tests/common.py index 2fc6be196..d6dd94e1e 100644 --- a/tests/common.py +++ b/tests/common.py @@ -1,5 +1,6 @@ import hashlib import os +import re import string import random from typing import Callable @@ -84,15 +85,8 @@ def get_git_revision_short_hash() -> str: db.MsSQL: TEST_MSSQL_CONN_STRING, } -_database_instances = {} - - -def get_conn(cls: type, shared: bool = True) -> Database: - if shared: - if cls not in _database_instances: - _database_instances[cls] = get_conn(cls, shared=False) - return _database_instances[cls] +def get_conn(cls: type) -> Database: return connect(CONN_STRINGS[cls], N_THREADS) @@ -134,17 +128,16 @@ def str_to_checksum(str: str): class DiffTestCase(unittest.TestCase): - "Sets up two tables for diffing" + """Sets up two tables for diffing""" db_cls = None src_schema = None dst_schema = None - shared_connection = True def setUp(self): assert self.db_cls, self.db_cls - self.connection = get_conn(self.db_cls, self.shared_connection) + self.connection = get_conn(self.db_cls) table_suffix = random_table_suffix() self.table_src_name = f"src{table_suffix}" @@ -187,3 +180,7 @@ def table_segment(database, table_path, key_columns, *args, **kw): if isinstance(key_columns, str): key_columns = (key_columns,) return TableSegment(database, table_path, key_columns, *args, **kw) + + +def ansi_stdout_cleanup(ansi_input) -> str: + return re.sub(r"\x1B\[[0-?]*[ -/]*[@-~]", "", ansi_input) diff --git a/tests/dbt_artifacts/jaffle_shop.duckdb b/tests/dbt_artifacts/jaffle_shop.duckdb index b0899b5f3..c36b00f56 100644 Binary files a/tests/dbt_artifacts/jaffle_shop.duckdb and b/tests/dbt_artifacts/jaffle_shop.duckdb differ diff --git a/tests/dbt_artifacts/target/manifest.json b/tests/dbt_artifacts/target/manifest.json index f46aa2542..46254a58a 100644 --- a/tests/dbt_artifacts/target/manifest.json +++ b/tests/dbt_artifacts/target/manifest.json @@ -1 
+1 @@ -{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v8.json", "dbt_version": "1.4.5", "generated_at": "2023-03-28T17:53:00.231489Z", "invocation_id": "289d7789-15b8-44be-a1f6-828f34858212", "env": {}, "project_id": "06e5b98c2db46f8a72cc4f66410e9b3b", "user_id": "1974995a-a39c-4b24-bacf-adfe12e92602", "send_anonymous_usage_stats": true, "adapter_type": "duckdb"}, "nodes": {"model.jaffle_shop.customers": {"database": "jaffle_shop", "schema": "dev", "name": "customers", "resource_type": "model", "package_name": "jaffle_shop", "path": "customers.sql", "original_file_path": "models/customers.sql", "unique_id": "model.jaffle_shop.customers", "fqn": ["jaffle_shop", "customers"], "alias": "customers", "checksum": {"name": "sha256", "checksum": "455b90a31f418ae776213ad9932c7cb72d19a5269a8c722bd9f4e44957313ce8"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "This table has basic information about a customer, as well as some derived facts based on a customer's orders", "columns": {"customer_id": {"name": "customer_id", "description": "This is a unique identifier for a customer", "meta": {}, "data_type": null, "quote": null, "tags": ["primary-key"]}, "first_name": {"name": "first_name", "description": "Customer's first name. PII.", "meta": {}, "data_type": null, "quote": null, "tags": []}, "last_name": {"name": "last_name", "description": "Customer's last name. 
PII.", "meta": {}, "data_type": null, "quote": null, "tags": []}, "first_order": {"name": "first_order", "description": "Date (UTC) of a customer's first order", "meta": {}, "data_type": null, "quote": null, "tags": []}, "most_recent_order": {"name": "most_recent_order", "description": "Date (UTC) of a customer's most recent order", "meta": {}, "data_type": null, "quote": null, "tags": []}, "number_of_orders": {"name": "number_of_orders", "description": "Count of the number of orders a customer has placed", "meta": {}, "data_type": null, "quote": null, "tags": []}, "total_order_amount": {"name": "total_order_amount", "description": "Total value (AUD) of a customer's orders", "meta": {}, "data_type": null, "quote": null, "tags": []}}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": "jaffle_shop://models/schema.yml", "build_path": "target/run/jaffle_shop/models/customers.sql", "deferred": false, "unrendered_config": {"materialized": "table"}, "created_at": 1680025829.8450181, "relation_name": "\"jaffle_shop\".\"dev\".\"customers\"", "raw_code": "with customers as (\n\n select * from {{ ref('stg_customers') }}\n\n),\n\norders as (\n\n select * from {{ ref('stg_orders') }}\n\n),\n\npayments as (\n\n select * from {{ ref('stg_payments') }}\n\n),\n\ncustomer_orders as (\n\n select\n customer_id,\n\n min(order_date) as first_order,\n max(order_date) as most_recent_order,\n count(order_id) as number_of_orders\n from orders\n\n group by customer_id\n\n),\n\ncustomer_payments as (\n\n select\n orders.customer_id,\n sum(amount) as total_amount\n\n from payments\n\n left join orders on\n payments.order_id = orders.order_id\n\n group by orders.customer_id\n\n),\n\nfinal as (\n\n select\n customers.customer_id,\n customers.first_name,\n customers.last_name,\n customer_orders.first_order,\n customer_orders.most_recent_order,\n customer_orders.number_of_orders,\n customer_payments.total_amount as customer_lifetime_value\n\n from customers\n\n left join customer_orders\n on customers.customer_id = customer_orders.customer_id\n\n left join customer_payments\n on customers.customer_id = customer_payments.customer_id\n\n)\n\nselect * from final", "language": "sql", "refs": [["stg_customers"], ["stg_orders"], ["stg_payments"]], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.jaffle_shop.stg_customers", "model.jaffle_shop.stg_orders", "model.jaffle_shop.stg_payments"]}, "compiled_path": "target/compiled/jaffle_shop/models/customers.sql", "compiled": true, "compiled_code": "with customers as (\n\n select * from \"jaffle_shop\".\"dev\".\"stg_customers\"\n\n),\n\norders as (\n\n select * from \"jaffle_shop\".\"dev\".\"stg_orders\"\n\n),\n\npayments as (\n\n select * from \"jaffle_shop\".\"dev\".\"stg_payments\"\n\n),\n\ncustomer_orders as (\n\n select\n customer_id,\n\n min(order_date) as first_order,\n max(order_date) as most_recent_order,\n count(order_id) as number_of_orders\n from orders\n\n group by customer_id\n\n),\n\ncustomer_payments as (\n\n select\n orders.customer_id,\n sum(amount) as total_amount\n\n from payments\n\n left join orders on\n payments.order_id = orders.order_id\n\n group by orders.customer_id\n\n),\n\nfinal as (\n\n select\n customers.customer_id,\n customers.first_name,\n customers.last_name,\n customer_orders.first_order,\n customer_orders.most_recent_order,\n customer_orders.number_of_orders,\n customer_payments.total_amount as customer_lifetime_value\n\n from customers\n\n left join customer_orders\n on customers.customer_id = 
customer_orders.customer_id\n\n left join customer_payments\n on customers.customer_id = customer_payments.customer_id\n\n)\n\nselect * from final", "extra_ctes_injected": true, "extra_ctes": []}, "model.jaffle_shop.orders": {"database": "jaffle_shop", "schema": "dev", "name": "orders", "resource_type": "model", "package_name": "jaffle_shop", "path": "orders.sql", "original_file_path": "models/orders.sql", "unique_id": "model.jaffle_shop.orders", "fqn": ["jaffle_shop", "orders"], "alias": "orders", "checksum": {"name": "sha256", "checksum": "c99b996bdc622502e5d0062779f76f74331f88824cffbd36d14cc6075c394d9a"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "This table has basic information about orders, as well as some derived facts based on payments", "columns": {"order_id": {"name": "order_id", "description": "This is a unique identifier for an order", "meta": {}, "data_type": null, "quote": null, "tags": ["primary-key"]}, "customer_id": {"name": "customer_id", "description": "Foreign key to the customers table", "meta": {}, "data_type": null, "quote": null, "tags": []}, "order_date": {"name": "order_date", "description": "Date (UTC) that the order was placed", "meta": {}, "data_type": null, "quote": null, "tags": []}, "status": {"name": "status", "description": "Orders can be one of the following statuses:\n\n| status | description |\n|----------------|------------------------------------------------------------------------------------------------------------------------|\n| placed | The order has been placed but has not yet left the warehouse |\n| shipped | The order has ben shipped to the customer and is currently in transit |\n| completed | The order has been received by the customer |\n| return_pending | The customer has indicated that they would like to return the order, but it has not yet been received at the warehouse |\n| returned | The order has been returned by the customer and received at the warehouse |", "meta": {}, "data_type": null, "quote": null, "tags": []}, "amount": {"name": "amount", "description": "Total amount (AUD) of the order", "meta": {}, "data_type": null, "quote": null, "tags": []}, "credit_card_amount": {"name": "credit_card_amount", "description": "Amount of the order (AUD) paid for by credit card", "meta": {}, "data_type": null, "quote": null, "tags": []}, "coupon_amount": {"name": "coupon_amount", "description": "Amount of the order (AUD) paid for by coupon", "meta": {}, "data_type": null, "quote": null, "tags": []}, "bank_transfer_amount": {"name": "bank_transfer_amount", "description": "Amount of the order (AUD) paid for by bank transfer", "meta": {}, "data_type": null, "quote": null, "tags": []}, "gift_card_amount": {"name": "gift_card_amount", "description": "Amount of the order (AUD) paid for by gift card", "meta": {}, "data_type": null, "quote": null, "tags": []}}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": "jaffle_shop://models/schema.yml", "build_path": "target/run/jaffle_shop/models/orders.sql", "deferred": false, "unrendered_config": {"materialized": "table"}, "created_at": 1680025829.846783, "relation_name": "\"jaffle_shop\".\"dev\".\"orders\"", 
"raw_code": "{% set payment_methods = ['credit_card', 'coupon', 'bank_transfer', 'gift_card'] %}\n\nwith orders as (\n\n select * from {{ ref('stg_orders') }}\n\n),\n\npayments as (\n\n select * from {{ ref('stg_payments') }}\n\n),\n\norder_payments as (\n\n select\n order_id,\n\n {% for payment_method in payment_methods -%}\n sum(case when payment_method = '{{ payment_method }}' then amount else 0 end) as {{ payment_method }}_amount,\n {% endfor -%}\n\n sum(amount) as total_amount\n\n from payments\n\n group by order_id\n\n),\n\nfinal as (\n\n select\n orders.order_id,\n orders.customer_id,\n orders.order_date,\n orders.status,\n\n {% for payment_method in payment_methods -%}\n\n order_payments.{{ payment_method }}_amount,\n\n {% endfor -%}\n\n order_payments.total_amount as amount\n\n from orders\n\n\n left join order_payments\n on orders.order_id = order_payments.order_id\n\n)\n\nselect * from final\nLIMIT 90", "language": "sql", "refs": [["stg_orders"], ["stg_payments"]], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.jaffle_shop.stg_orders", "model.jaffle_shop.stg_payments"]}, "compiled_path": "target/compiled/jaffle_shop/models/orders.sql", "compiled": true, "compiled_code": "\n\nwith orders as (\n\n select * from \"jaffle_shop\".\"dev\".\"stg_orders\"\n\n),\n\npayments as (\n\n select * from \"jaffle_shop\".\"dev\".\"stg_payments\"\n\n),\n\norder_payments as (\n\n select\n order_id,\n\n sum(case when payment_method = 'credit_card' then amount else 0 end) as credit_card_amount,\n sum(case when payment_method = 'coupon' then amount else 0 end) as coupon_amount,\n sum(case when payment_method = 'bank_transfer' then amount else 0 end) as bank_transfer_amount,\n sum(case when payment_method = 'gift_card' then amount else 0 end) as gift_card_amount,\n sum(amount) as total_amount\n\n from payments\n\n group by order_id\n\n),\n\nfinal as (\n\n select\n orders.order_id,\n orders.customer_id,\n orders.order_date,\n orders.status,\n\n order_payments.credit_card_amount,\n\n order_payments.coupon_amount,\n\n order_payments.bank_transfer_amount,\n\n order_payments.gift_card_amount,\n\n order_payments.total_amount as amount\n\n from orders\n\n\n left join order_payments\n on orders.order_id = order_payments.order_id\n\n)\n\nselect * from final\nLIMIT 90", "extra_ctes_injected": true, "extra_ctes": []}, "model.jaffle_shop.stg_customers": {"database": "jaffle_shop", "schema": "dev", "name": "stg_customers", "resource_type": "model", "package_name": "jaffle_shop", "path": "staging/stg_customers.sql", "original_file_path": "models/staging/stg_customers.sql", "unique_id": "model.jaffle_shop.stg_customers", "fqn": ["jaffle_shop", "staging", "stg_customers"], "alias": "stg_customers", "checksum": {"name": "sha256", "checksum": "6f18a29204dad1de6dbb0c288144c4990742e0a1e065c3b2a67b5f98334c22ba"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {"customer_id": {"name": "customer_id", "description": "", "meta": {}, "data_type": null, "quote": null, "tags": ["primary-key"]}}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": "jaffle_shop://models/staging/schema.yml", "build_path": 
"target/run/jaffle_shop/models/staging/stg_customers.sql", "deferred": false, "unrendered_config": {"materialized": "view"}, "created_at": 1680025829.869041, "relation_name": "\"jaffle_shop\".\"dev\".\"stg_customers\"", "raw_code": "with source as (\n\n {#-\n Normally we would select from the table here, but we are using seeds to load\n our data in this project\n #}\n select * from {{ ref('raw_customers') }}\n\n),\n\nrenamed as (\n\n select\n id as customer_id,\n first_name,\n last_name\n\n from source\n\n)\n\nselect * from renamed", "language": "sql", "refs": [["raw_customers"]], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["seed.jaffle_shop.raw_customers"]}, "compiled_path": "target/compiled/jaffle_shop/models/staging/stg_customers.sql", "compiled": true, "compiled_code": "with source as (\n select * from \"jaffle_shop\".\"dev\".\"raw_customers\"\n\n),\n\nrenamed as (\n\n select\n id as customer_id,\n first_name,\n last_name\n\n from source\n\n)\n\nselect * from renamed", "extra_ctes_injected": true, "extra_ctes": []}, "model.jaffle_shop.stg_payments": {"database": "jaffle_shop", "schema": "dev", "name": "stg_payments", "resource_type": "model", "package_name": "jaffle_shop", "path": "staging/stg_payments.sql", "original_file_path": "models/staging/stg_payments.sql", "unique_id": "model.jaffle_shop.stg_payments", "fqn": ["jaffle_shop", "staging", "stg_payments"], "alias": "stg_payments", "checksum": {"name": "sha256", "checksum": "ec8712986e99fdea30feaba2144bb9eca2bb6dc862880cbb6e21831857595fe0"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {"payment_id": {"name": "payment_id", "description": "", "meta": {}, "data_type": null, "quote": null, "tags": ["primary-key"]}, "payment_method": {"name": "payment_method", "description": "", "meta": {}, "data_type": null, "quote": null, "tags": []}}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": "jaffle_shop://models/staging/schema.yml", "build_path": "target/run/jaffle_shop/models/staging/stg_payments.sql", "deferred": false, "unrendered_config": {"materialized": "view"}, "created_at": 1680025829.870419, "relation_name": "\"jaffle_shop\".\"dev\".\"stg_payments\"", "raw_code": "with source as (\n \n {#-\n Normally we would select from the table here, but we are using seeds to load\n our data in this project\n #}\n select * from {{ ref('raw_payments') }}\n\n),\n\nrenamed as (\n\n select\n id as payment_id,\n order_id,\n payment_method,\n\n -- `amount` is currently stored in cents, so we convert it to dollars\n amount / 100 as amount,\n amount as amount_cents\n\n from source\n\n)\n\nselect * from renamed", "language": "sql", "refs": [["raw_payments"]], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["seed.jaffle_shop.raw_payments"]}, "compiled_path": "target/compiled/jaffle_shop/models/staging/stg_payments.sql", "compiled": true, "compiled_code": "with source as (\n select * from \"jaffle_shop\".\"dev\".\"raw_payments\"\n\n),\n\nrenamed as (\n\n select\n id as payment_id,\n order_id,\n payment_method,\n\n -- `amount` is currently stored in cents, so we convert it to dollars\n amount / 100 as amount,\n 
amount as amount_cents\n\n from source\n\n)\n\nselect * from renamed", "extra_ctes_injected": true, "extra_ctes": []}, "model.jaffle_shop.stg_orders": {"database": "jaffle_shop", "schema": "dev", "name": "stg_orders", "resource_type": "model", "package_name": "jaffle_shop", "path": "staging/stg_orders.sql", "original_file_path": "models/staging/stg_orders.sql", "unique_id": "model.jaffle_shop.stg_orders", "fqn": ["jaffle_shop", "staging", "stg_orders"], "alias": "stg_orders", "checksum": {"name": "sha256", "checksum": "afffa9cbc57e5fd2cf5898ebf571d444a62c9d6d7929d8133d30567fb9a2ce97"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {"order_id": {"name": "order_id", "description": "", "meta": {}, "data_type": null, "quote": null, "tags": ["primary-key"]}, "status": {"name": "status", "description": "", "meta": {}, "data_type": null, "quote": null, "tags": []}}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": "jaffle_shop://models/staging/schema.yml", "build_path": "target/run/jaffle_shop/models/staging/stg_orders.sql", "deferred": false, "unrendered_config": {"materialized": "view"}, "created_at": 1680025829.869803, "relation_name": "\"jaffle_shop\".\"dev\".\"stg_orders\"", "raw_code": "with source as (\n\n {#-\n Normally we would select from the table here, but we are using seeds to load\n our data in this project\n #}\n select * from {{ ref('raw_orders') }}\n\n),\n\nrenamed as (\n\n select\n id as order_id,\n user_id as customer_id,\n order_date,\n status\n\n from source\n\n)\n\nselect * from renamed", "language": "sql", "refs": [["raw_orders"]], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["seed.jaffle_shop.raw_orders"]}, "compiled_path": "target/compiled/jaffle_shop/models/staging/stg_orders.sql", "compiled": true, "compiled_code": "with source as (\n select * from \"jaffle_shop\".\"dev\".\"raw_orders\"\n\n),\n\nrenamed as (\n\n select\n id as order_id,\n user_id as customer_id,\n order_date,\n status\n\n from source\n\n)\n\nselect * from renamed", "extra_ctes_injected": true, "extra_ctes": []}, "seed.jaffle_shop.raw_customers": {"database": "jaffle_shop", "schema": "dev", "name": "raw_customers", "resource_type": "seed", "package_name": "jaffle_shop", "path": "raw_customers.csv", "original_file_path": "seeds/raw_customers.csv", "unique_id": "seed.jaffle_shop.raw_customers", "fqn": ["jaffle_shop", "raw_customers"], "alias": "raw_customers", "checksum": {"name": "sha256", "checksum": "24579b4b26098d43265376f3c50be8b10faf8e8fd95f5508074f10f76a12671d"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "quote_columns": null, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 
1680025829.8330948, "relation_name": "\"jaffle_shop\".\"dev\".\"raw_customers\"", "raw_code": "", "root_path": "/Users/dan/repos/jaffle_shop", "depends_on": {"macros": []}}, "seed.jaffle_shop.raw_orders": {"database": "jaffle_shop", "schema": "dev", "name": "raw_orders", "resource_type": "seed", "package_name": "jaffle_shop", "path": "raw_orders.csv", "original_file_path": "seeds/raw_orders.csv", "unique_id": "seed.jaffle_shop.raw_orders", "fqn": ["jaffle_shop", "raw_orders"], "alias": "raw_orders", "checksum": {"name": "sha256", "checksum": "ee6c68d1639ec2b23a4495ec12475e09b8ed4b61e23ab0411ea7ec76648356f7"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "quote_columns": null, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1680025829.834146, "relation_name": "\"jaffle_shop\".\"dev\".\"raw_orders\"", "raw_code": "", "root_path": "/Users/dan/repos/jaffle_shop", "depends_on": {"macros": []}}, "seed.jaffle_shop.raw_payments": {"database": "jaffle_shop", "schema": "dev", "name": "raw_payments", "resource_type": "seed", "package_name": "jaffle_shop", "path": "raw_payments.csv", "original_file_path": "seeds/raw_payments.csv", "unique_id": "seed.jaffle_shop.raw_payments", "fqn": ["jaffle_shop", "raw_payments"], "alias": "raw_payments", "checksum": {"name": "sha256", "checksum": "03fd407f3135f84456431a923f22fc185a2154079e210c20b690e3ab11687d11"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "quote_columns": null, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1680025829.8351219, "relation_name": "\"jaffle_shop\".\"dev\".\"raw_payments\"", "raw_code": "", "root_path": "/Users/dan/repos/jaffle_shop", "depends_on": {"macros": []}}, "test.jaffle_shop.unique_customers_customer_id.c5af1ff4b1": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "customer_id", "model": "{{ get_where_subquery(ref('customers')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "unique_customers_customer_id", "resource_type": "test", "package_name": "jaffle_shop", "path": "unique_customers_customer_id.sql", "original_file_path": "models/schema.yml", "unique_id": "test.jaffle_shop.unique_customers_customer_id.c5af1ff4b1", "fqn": ["jaffle_shop", "unique_customers_customer_id"], "alias": "unique_customers_customer_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", 
"warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["primary-key"], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1680025829.850326, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["customers"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique"], "nodes": ["model.jaffle_shop.customers"]}, "compiled_path": null, "column_name": "customer_id", "file_key_name": "models.customers"}, "test.jaffle_shop.not_null_customers_customer_id.5c9bf9911d": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "customer_id", "model": "{{ get_where_subquery(ref('customers')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "not_null_customers_customer_id", "resource_type": "test", "package_name": "jaffle_shop", "path": "not_null_customers_customer_id.sql", "original_file_path": "models/schema.yml", "unique_id": "test.jaffle_shop.not_null_customers_customer_id.5c9bf9911d", "fqn": ["jaffle_shop", "not_null_customers_customer_id"], "alias": "not_null_customers_customer_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["primary-key"], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1680025829.851261, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["customers"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.jaffle_shop.customers"]}, "compiled_path": null, "column_name": "customer_id", "file_key_name": "models.customers"}, "test.jaffle_shop.unique_orders_order_id.fed79b3a6e": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "order_id", "model": "{{ get_where_subquery(ref('orders')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "unique_orders_order_id", "resource_type": "test", "package_name": "jaffle_shop", "path": "unique_orders_order_id.sql", "original_file_path": "models/schema.yml", "unique_id": "test.jaffle_shop.unique_orders_order_id.fed79b3a6e", "fqn": ["jaffle_shop", "unique_orders_order_id"], "alias": "unique_orders_order_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["primary-key"], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1680025829.852126, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["orders"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique"], "nodes": ["model.jaffle_shop.orders"]}, 
"compiled_path": null, "column_name": "order_id", "file_key_name": "models.orders"}, "test.jaffle_shop.not_null_orders_order_id.cf6c17daed": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "order_id", "model": "{{ get_where_subquery(ref('orders')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "not_null_orders_order_id", "resource_type": "test", "package_name": "jaffle_shop", "path": "not_null_orders_order_id.sql", "original_file_path": "models/schema.yml", "unique_id": "test.jaffle_shop.not_null_orders_order_id.cf6c17daed", "fqn": ["jaffle_shop", "not_null_orders_order_id"], "alias": "not_null_orders_order_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["primary-key"], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1680025829.852982, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["orders"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.jaffle_shop.orders"]}, "compiled_path": null, "column_name": "order_id", "file_key_name": "models.orders"}, "test.jaffle_shop.not_null_orders_customer_id.c5f02694af": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "customer_id", "model": "{{ get_where_subquery(ref('orders')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "not_null_orders_customer_id", "resource_type": "test", "package_name": "jaffle_shop", "path": "not_null_orders_customer_id.sql", "original_file_path": "models/schema.yml", "unique_id": "test.jaffle_shop.not_null_orders_customer_id.c5f02694af", "fqn": ["jaffle_shop", "not_null_orders_customer_id"], "alias": "not_null_orders_customer_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1680025829.8539362, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["orders"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.jaffle_shop.orders"]}, "compiled_path": null, "column_name": "customer_id", "file_key_name": "models.orders"}, "test.jaffle_shop.relationships_orders_customer_id__customer_id__ref_customers_.c6ec7f58f2": {"test_metadata": {"name": "relationships", "kwargs": {"to": "ref('customers')", "field": "customer_id", "column_name": "customer_id", "model": "{{ get_where_subquery(ref('orders')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "relationships_orders_customer_id__customer_id__ref_customers_", "resource_type": "test", "package_name": 
"jaffle_shop", "path": "relationships_orders_customer_id__customer_id__ref_customers_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.jaffle_shop.relationships_orders_customer_id__customer_id__ref_customers_.c6ec7f58f2", "fqn": ["jaffle_shop", "relationships_orders_customer_id__customer_id__ref_customers_"], "alias": "relationships_orders_customer_id__customer_id__ref_customers_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1680025829.854784, "relation_name": null, "raw_code": "{{ test_relationships(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["customers"], ["orders"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_relationships", "macro.dbt.get_where_subquery"], "nodes": ["model.jaffle_shop.customers", "model.jaffle_shop.orders"]}, "compiled_path": null, "column_name": "customer_id", "file_key_name": "models.orders"}, "test.jaffle_shop.accepted_values_orders_status__placed__shipped__completed__return_pending__returned.be6b5b5ec3": {"test_metadata": {"name": "accepted_values", "kwargs": {"values": ["placed", "shipped", "completed", "return_pending", "returned"], "column_name": "status", "model": "{{ get_where_subquery(ref('orders')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "accepted_values_orders_status__placed__shipped__completed__return_pending__returned", "resource_type": "test", "package_name": "jaffle_shop", "path": "accepted_values_orders_1ce6ab157c285f7cd2ac656013faf758.sql", "original_file_path": "models/schema.yml", "unique_id": "test.jaffle_shop.accepted_values_orders_status__placed__shipped__completed__return_pending__returned.be6b5b5ec3", "fqn": ["jaffle_shop", "accepted_values_orders_status__placed__shipped__completed__return_pending__returned"], "alias": "accepted_values_orders_1ce6ab157c285f7cd2ac656013faf758", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": "accepted_values_orders_1ce6ab157c285f7cd2ac656013faf758", "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"alias": "accepted_values_orders_1ce6ab157c285f7cd2ac656013faf758"}, "created_at": 1680025829.85997, "relation_name": null, "raw_code": "{{ test_accepted_values(**_dbt_generic_test_kwargs) }}{{ config(alias=\"accepted_values_orders_1ce6ab157c285f7cd2ac656013faf758\") }}", "language": "sql", "refs": [["orders"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_accepted_values", "macro.dbt.get_where_subquery"], "nodes": ["model.jaffle_shop.orders"]}, "compiled_path": null, "column_name": "status", "file_key_name": "models.orders"}, "test.jaffle_shop.not_null_orders_amount.106140f9fd": {"test_metadata": {"name": 
"not_null", "kwargs": {"column_name": "amount", "model": "{{ get_where_subquery(ref('orders')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "not_null_orders_amount", "resource_type": "test", "package_name": "jaffle_shop", "path": "not_null_orders_amount.sql", "original_file_path": "models/schema.yml", "unique_id": "test.jaffle_shop.not_null_orders_amount.106140f9fd", "fqn": ["jaffle_shop", "not_null_orders_amount"], "alias": "not_null_orders_amount", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1680025829.864409, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["orders"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.jaffle_shop.orders"]}, "compiled_path": null, "column_name": "amount", "file_key_name": "models.orders"}, "test.jaffle_shop.not_null_orders_credit_card_amount.d3ca593b59": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "credit_card_amount", "model": "{{ get_where_subquery(ref('orders')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "not_null_orders_credit_card_amount", "resource_type": "test", "package_name": "jaffle_shop", "path": "not_null_orders_credit_card_amount.sql", "original_file_path": "models/schema.yml", "unique_id": "test.jaffle_shop.not_null_orders_credit_card_amount.d3ca593b59", "fqn": ["jaffle_shop", "not_null_orders_credit_card_amount"], "alias": "not_null_orders_credit_card_amount", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1680025829.865262, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["orders"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.jaffle_shop.orders"]}, "compiled_path": null, "column_name": "credit_card_amount", "file_key_name": "models.orders"}, "test.jaffle_shop.not_null_orders_coupon_amount.ab90c90625": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "coupon_amount", "model": "{{ get_where_subquery(ref('orders')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "not_null_orders_coupon_amount", "resource_type": "test", "package_name": "jaffle_shop", "path": "not_null_orders_coupon_amount.sql", "original_file_path": "models/schema.yml", "unique_id": "test.jaffle_shop.not_null_orders_coupon_amount.ab90c90625", "fqn": ["jaffle_shop", "not_null_orders_coupon_amount"], "alias": 
"not_null_orders_coupon_amount", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1680025829.866215, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["orders"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.jaffle_shop.orders"]}, "compiled_path": null, "column_name": "coupon_amount", "file_key_name": "models.orders"}, "test.jaffle_shop.not_null_orders_bank_transfer_amount.7743500c49": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "bank_transfer_amount", "model": "{{ get_where_subquery(ref('orders')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "not_null_orders_bank_transfer_amount", "resource_type": "test", "package_name": "jaffle_shop", "path": "not_null_orders_bank_transfer_amount.sql", "original_file_path": "models/schema.yml", "unique_id": "test.jaffle_shop.not_null_orders_bank_transfer_amount.7743500c49", "fqn": ["jaffle_shop", "not_null_orders_bank_transfer_amount"], "alias": "not_null_orders_bank_transfer_amount", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1680025829.867062, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["orders"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.jaffle_shop.orders"]}, "compiled_path": null, "column_name": "bank_transfer_amount", "file_key_name": "models.orders"}, "test.jaffle_shop.not_null_orders_gift_card_amount.413a0d2d7a": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "gift_card_amount", "model": "{{ get_where_subquery(ref('orders')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "not_null_orders_gift_card_amount", "resource_type": "test", "package_name": "jaffle_shop", "path": "not_null_orders_gift_card_amount.sql", "original_file_path": "models/schema.yml", "unique_id": "test.jaffle_shop.not_null_orders_gift_card_amount.413a0d2d7a", "fqn": ["jaffle_shop", "not_null_orders_gift_card_amount"], "alias": "not_null_orders_gift_card_amount", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, 
"node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1680025829.8679218, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["orders"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.jaffle_shop.orders"]}, "compiled_path": null, "column_name": "gift_card_amount", "file_key_name": "models.orders"}, "test.jaffle_shop.unique_stg_customers_customer_id.c7614daada": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "customer_id", "model": "{{ get_where_subquery(ref('stg_customers')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "unique_stg_customers_customer_id", "resource_type": "test", "package_name": "jaffle_shop", "path": "unique_stg_customers_customer_id.sql", "original_file_path": "models/staging/schema.yml", "unique_id": "test.jaffle_shop.unique_stg_customers_customer_id.c7614daada", "fqn": ["jaffle_shop", "staging", "unique_stg_customers_customer_id"], "alias": "unique_stg_customers_customer_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["primary-key"], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1680025829.870772, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["stg_customers"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique"], "nodes": ["model.jaffle_shop.stg_customers"]}, "compiled_path": null, "column_name": "customer_id", "file_key_name": "models.stg_customers"}, "test.jaffle_shop.not_null_stg_customers_customer_id.e2cfb1f9aa": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "customer_id", "model": "{{ get_where_subquery(ref('stg_customers')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "not_null_stg_customers_customer_id", "resource_type": "test", "package_name": "jaffle_shop", "path": "not_null_stg_customers_customer_id.sql", "original_file_path": "models/staging/schema.yml", "unique_id": "test.jaffle_shop.not_null_stg_customers_customer_id.e2cfb1f9aa", "fqn": ["jaffle_shop", "staging", "not_null_stg_customers_customer_id"], "alias": "not_null_stg_customers_customer_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["primary-key"], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1680025829.871656, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["stg_customers"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": 
["model.jaffle_shop.stg_customers"]}, "compiled_path": null, "column_name": "customer_id", "file_key_name": "models.stg_customers"}, "test.jaffle_shop.unique_stg_orders_order_id.e3b841c71a": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "order_id", "model": "{{ get_where_subquery(ref('stg_orders')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "unique_stg_orders_order_id", "resource_type": "test", "package_name": "jaffle_shop", "path": "unique_stg_orders_order_id.sql", "original_file_path": "models/staging/schema.yml", "unique_id": "test.jaffle_shop.unique_stg_orders_order_id.e3b841c71a", "fqn": ["jaffle_shop", "staging", "unique_stg_orders_order_id"], "alias": "unique_stg_orders_order_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["primary-key"], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1680025829.87264, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["stg_orders"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique"], "nodes": ["model.jaffle_shop.stg_orders"]}, "compiled_path": null, "column_name": "order_id", "file_key_name": "models.stg_orders"}, "test.jaffle_shop.not_null_stg_orders_order_id.81cfe2fe64": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "order_id", "model": "{{ get_where_subquery(ref('stg_orders')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "not_null_stg_orders_order_id", "resource_type": "test", "package_name": "jaffle_shop", "path": "not_null_stg_orders_order_id.sql", "original_file_path": "models/staging/schema.yml", "unique_id": "test.jaffle_shop.not_null_stg_orders_order_id.81cfe2fe64", "fqn": ["jaffle_shop", "staging", "not_null_stg_orders_order_id"], "alias": "not_null_stg_orders_order_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["primary-key"], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1680025829.873489, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["stg_orders"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.jaffle_shop.stg_orders"]}, "compiled_path": null, "column_name": "order_id", "file_key_name": "models.stg_orders"}, "test.jaffle_shop.accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned.080fb20aad": {"test_metadata": {"name": "accepted_values", "kwargs": {"values": ["placed", "shipped", "completed", "return_pending", "returned"], "column_name": "status", "model": "{{ get_where_subquery(ref('stg_orders')) }}"}, 
"namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned", "resource_type": "test", "package_name": "jaffle_shop", "path": "accepted_values_stg_orders_4f514bf94b77b7ea437830eec4421c58.sql", "original_file_path": "models/staging/schema.yml", "unique_id": "test.jaffle_shop.accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned.080fb20aad", "fqn": ["jaffle_shop", "staging", "accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned"], "alias": "accepted_values_stg_orders_4f514bf94b77b7ea437830eec4421c58", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": "accepted_values_stg_orders_4f514bf94b77b7ea437830eec4421c58", "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"alias": "accepted_values_stg_orders_4f514bf94b77b7ea437830eec4421c58"}, "created_at": 1680025829.874337, "relation_name": null, "raw_code": "{{ test_accepted_values(**_dbt_generic_test_kwargs) }}{{ config(alias=\"accepted_values_stg_orders_4f514bf94b77b7ea437830eec4421c58\") }}", "language": "sql", "refs": [["stg_orders"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_accepted_values", "macro.dbt.get_where_subquery"], "nodes": ["model.jaffle_shop.stg_orders"]}, "compiled_path": null, "column_name": "status", "file_key_name": "models.stg_orders"}, "test.jaffle_shop.unique_stg_payments_payment_id.3744510712": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "payment_id", "model": "{{ get_where_subquery(ref('stg_payments')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "unique_stg_payments_payment_id", "resource_type": "test", "package_name": "jaffle_shop", "path": "unique_stg_payments_payment_id.sql", "original_file_path": "models/staging/schema.yml", "unique_id": "test.jaffle_shop.unique_stg_payments_payment_id.3744510712", "fqn": ["jaffle_shop", "staging", "unique_stg_payments_payment_id"], "alias": "unique_stg_payments_payment_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["primary-key"], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1680025829.877381, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["stg_payments"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique"], "nodes": ["model.jaffle_shop.stg_payments"]}, "compiled_path": null, "column_name": "payment_id", "file_key_name": "models.stg_payments"}, "test.jaffle_shop.not_null_stg_payments_payment_id.c19cc50075": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "payment_id", 
"model": "{{ get_where_subquery(ref('stg_payments')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "not_null_stg_payments_payment_id", "resource_type": "test", "package_name": "jaffle_shop", "path": "not_null_stg_payments_payment_id.sql", "original_file_path": "models/staging/schema.yml", "unique_id": "test.jaffle_shop.not_null_stg_payments_payment_id.c19cc50075", "fqn": ["jaffle_shop", "staging", "not_null_stg_payments_payment_id"], "alias": "not_null_stg_payments_payment_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["primary-key"], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1680025829.8782341, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["stg_payments"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.jaffle_shop.stg_payments"]}, "compiled_path": null, "column_name": "payment_id", "file_key_name": "models.stg_payments"}, "test.jaffle_shop.accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card.3c3820f278": {"test_metadata": {"name": "accepted_values", "kwargs": {"values": ["credit_card", "coupon", "bank_transfer", "gift_card"], "column_name": "payment_method", "model": "{{ get_where_subquery(ref('stg_payments')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card", "resource_type": "test", "package_name": "jaffle_shop", "path": "accepted_values_stg_payments_c7909fb19b1f0177c2bf99c7912f06ef.sql", "original_file_path": "models/staging/schema.yml", "unique_id": "test.jaffle_shop.accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card.3c3820f278", "fqn": ["jaffle_shop", "staging", "accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card"], "alias": "accepted_values_stg_payments_c7909fb19b1f0177c2bf99c7912f06ef", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": "accepted_values_stg_payments_c7909fb19b1f0177c2bf99c7912f06ef", "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"alias": "accepted_values_stg_payments_c7909fb19b1f0177c2bf99c7912f06ef"}, "created_at": 1680025829.87921, "relation_name": null, "raw_code": "{{ test_accepted_values(**_dbt_generic_test_kwargs) }}{{ config(alias=\"accepted_values_stg_payments_c7909fb19b1f0177c2bf99c7912f06ef\") }}", "language": "sql", "refs": [["stg_payments"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_accepted_values", "macro.dbt.get_where_subquery"], "nodes": ["model.jaffle_shop.stg_payments"]}, 
"compiled_path": null, "column_name": "payment_method", "file_key_name": "models.stg_payments"}}, "sources": {}, "macros": {"macro.dbt_duckdb.duckdb__get_binding_char": {"name": "duckdb__get_binding_char", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/seed.sql", "original_file_path": "macros/seed.sql", "unique_id": "macro.dbt_duckdb.duckdb__get_binding_char", "macro_sql": "{% macro duckdb__get_binding_char() %}\n {{ return('?') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.528875, "supported_languages": null}, "macro.dbt_duckdb.duckdb__get_batch_size": {"name": "duckdb__get_batch_size", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/seed.sql", "original_file_path": "macros/seed.sql", "unique_id": "macro.dbt_duckdb.duckdb__get_batch_size", "macro_sql": "{% macro duckdb__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.529001, "supported_languages": null}, "macro.dbt_duckdb.duckdb__load_csv_rows": {"name": "duckdb__load_csv_rows", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/seed.sql", "original_file_path": "macros/seed.sql", "unique_id": "macro.dbt_duckdb.duckdb__load_csv_rows", "macro_sql": "{% macro duckdb__load_csv_rows(model, agate_table) %}\n {% set batch_size = get_batch_size() %}\n {% set agate_table = adapter.convert_datetimes_to_strs(agate_table) %}\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.5302458, "supported_languages": null}, "macro.dbt_duckdb.duckdb__snapshot_merge_sql": {"name": "duckdb__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/snapshot_merge.sql", "original_file_path": "macros/snapshot_merge.sql", "unique_id": "macro.dbt_duckdb.duckdb__snapshot_merge_sql", "macro_sql": "{% macro duckdb__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n {% set insert_sql %}\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n 
where DBT_INTERNAL_SOURCE.dbt_change_type = 'insert';\n {% endset %}\n\n {% do adapter.add_query(insert_sql, auto_begin=False) %}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id = {{ target.identifier }}.dbt_scd_id\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'update'\n and {{ target.identifier }}.dbt_valid_to is null;\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.5310872, "supported_languages": null}, "macro.dbt_duckdb.duckdb__get_catalog": {"name": "duckdb__get_catalog", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_duckdb.duckdb__get_catalog", "macro_sql": "{% macro duckdb__get_catalog(information_schema, schemas) -%}\n {%- call statement('catalog', fetch_result=True) -%}\n select\n '{{ database }}' as table_database,\n t.table_schema,\n t.table_name,\n t.table_type,\n '' as table_comment,\n c.column_name,\n c.ordinal_position as column_index,\n c.data_type column_type,\n '' as column_comment,\n '' as table_owner\n FROM information_schema.tables t JOIN information_schema.columns c ON t.table_schema = c.table_schema AND t.table_name = c.table_name\n WHERE (\n {%- for schema in schemas -%}\n upper(t.table_schema) = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n AND t.table_type IN ('BASE TABLE', 'VIEW')\n ORDER BY\n t.table_schema,\n t.table_name,\n c.ordinal_position\n {%- endcall -%}\n {{ return(load_result('catalog').table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.531669, "supported_languages": null}, "macro.dbt_duckdb.duckdb__create_schema": {"name": "duckdb__create_schema", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__create_schema", "macro_sql": "{% macro duckdb__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=adapter.use_database()) }}\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.541016, "supported_languages": null}, "macro.dbt_duckdb.duckdb__drop_schema": {"name": "duckdb__drop_schema", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__drop_schema", "macro_sql": "{% macro duckdb__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=adapter.use_database()) }} cascade\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.541246, "supported_languages": null}, "macro.dbt_duckdb.duckdb__list_schemas": {"name": "duckdb__list_schemas", "resource_type": "macro", 
"package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__list_schemas", "macro_sql": "{% macro duckdb__list_schemas(database) -%}\n {% set sql %}\n select schema_name\n from information_schema.schemata\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.541417, "supported_languages": null}, "macro.dbt_duckdb.duckdb__check_schema_exists": {"name": "duckdb__check_schema_exists", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__check_schema_exists", "macro_sql": "{% macro duckdb__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from information_schema.schemata\n where schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.541627, "supported_languages": null}, "macro.dbt_duckdb.duckdb__create_table_as": {"name": "duckdb__create_table_as", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__create_table_as", "macro_sql": "{% macro duckdb__create_table_as(temporary, relation, compiled_code, language='sql') -%}\n {%- if language == 'sql' -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary and adapter.use_database()), schema=(not temporary)) }}\n as (\n {{ compiled_code }}\n );\n {%- elif language == 'python' -%}\n {{ py_write_table(temporary=temporary, relation=relation, compiled_code=compiled_code) }}\n {%- else -%}\n {% do exceptions.raise_compiler_error(\"duckdb__create_table_as macro didn't get supported language, it got %s\" % language) %}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.py_write_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.542319, "supported_languages": null}, "macro.dbt_duckdb.py_write_table": {"name": "py_write_table", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.py_write_table", "macro_sql": "{% macro py_write_table(temporary, relation, compiled_code) -%}\n{{ compiled_code }}\n\ndef materialize(df, con):\n try:\n import pyarrow\n except ImportError:\n pass\n finally:\n if isinstance(df, pyarrow.Table):\n # https://github.com/duckdb/duckdb/issues/6584\n import pyarrow.dataset\n con.execute('create table {{ relation.include(database=adapter.use_database()) }} as select * from df')\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.542516, "supported_languages": null}, "macro.dbt_duckdb.duckdb__create_view_as": {"name": "duckdb__create_view_as", 
"resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__create_view_as", "macro_sql": "{% macro duckdb__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation.include(database=adapter.use_database()) }} as (\n {{ sql }}\n );\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.542791, "supported_languages": null}, "macro.dbt_duckdb.duckdb__get_columns_in_relation": {"name": "duckdb__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__get_columns_in_relation", "macro_sql": "{% macro duckdb__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from information_schema.columns\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.543176, "supported_languages": null}, "macro.dbt_duckdb.duckdb__list_relations_without_caching": {"name": "duckdb__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__list_relations_without_caching", "macro_sql": "{% macro duckdb__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n table_name as name,\n table_schema as schema,\n CASE table_type\n WHEN 'BASE TABLE' THEN 'table'\n WHEN 'VIEW' THEN 'view'\n WHEN 'LOCAL TEMPORARY' THEN 'table'\n END as type\n from information_schema.tables\n where table_schema = '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.54345, "supported_languages": null}, "macro.dbt_duckdb.duckdb__drop_relation": {"name": "duckdb__drop_relation", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__drop_relation", "macro_sql": "{% macro duckdb__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n drop {{ relation.type }} if exists {{ relation.include(database=adapter.use_database()) }} cascade\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, 
"docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.543704, "supported_languages": null}, "macro.dbt_duckdb.duckdb__truncate_relation": {"name": "duckdb__truncate_relation", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__truncate_relation", "macro_sql": "{% macro duckdb__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n DELETE FROM {{ relation.include(database=adapter.use_database()) }} WHERE 1=1\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.5438988, "supported_languages": null}, "macro.dbt_duckdb.duckdb__rename_relation": {"name": "duckdb__rename_relation", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__rename_relation", "macro_sql": "{% macro duckdb__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter {{ to_relation.type }} {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.544176, "supported_languages": null}, "macro.dbt_duckdb.duckdb__make_temp_relation": {"name": "duckdb__make_temp_relation", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__make_temp_relation", "macro_sql": "{% macro duckdb__make_temp_relation(base_relation, suffix) %}\n {% set tmp_identifier = base_relation.identifier ~ suffix ~ py_current_timestring() %}\n {% do return(base_relation.incorporate(\n path={\n \"identifier\": tmp_identifier,\n \"schema\": none,\n \"database\": none\n })) -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.py_current_timestring"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.544481, "supported_languages": null}, "macro.dbt_duckdb.duckdb__current_timestamp": {"name": "duckdb__current_timestamp", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__current_timestamp", "macro_sql": "{% macro duckdb__current_timestamp() -%}\n now()\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.5445492, "supported_languages": null}, "macro.dbt_duckdb.duckdb__snapshot_string_as_time": {"name": "duckdb__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__snapshot_string_as_time", "macro_sql": "{% macro duckdb__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp\" -%}\n {{ return(result) }}\n{%- endmacro %}", 
"depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.544718, "supported_languages": null}, "macro.dbt_duckdb.duckdb__snapshot_get_time": {"name": "duckdb__snapshot_get_time", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__snapshot_get_time", "macro_sql": "{% macro duckdb__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.5448031, "supported_languages": null}, "macro.dbt_duckdb.duckdb__get_incremental_default_sql": {"name": "duckdb__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__get_incremental_default_sql", "macro_sql": "{% macro duckdb__get_incremental_default_sql(arg_dict) %}\n {% do return(get_incremental_delete_insert_sql(arg_dict)) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_delete_insert_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.545005, "supported_languages": null}, "macro.dbt_duckdb.duckdb__get_incremental_delete_insert_sql": {"name": "duckdb__get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__get_incremental_delete_insert_sql", "macro_sql": "{% macro duckdb__get_incremental_delete_insert_sql(arg_dict) %}\n {% do return(get_delete_insert_merge_sql(arg_dict[\"target_relation\"].include(database=adapter.use_database()), arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"])) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.54529, "supported_languages": null}, "macro.dbt_duckdb.duckdb__get_incremental_append_sql": {"name": "duckdb__get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__get_incremental_append_sql", "macro_sql": "{% macro duckdb__get_incremental_append_sql(arg_dict) %}\n {% do return(get_insert_into_sql(arg_dict[\"target_relation\"].include(database=adapter.use_database()), arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"])) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_into_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.5455391, "supported_languages": null}, "macro.dbt_duckdb.location_exists": {"name": "location_exists", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.location_exists", "macro_sql": "{% macro location_exists(location) -%}\n {% do return(adapter.location_exists(location)) %}\n{% 
endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.545696, "supported_languages": null}, "macro.dbt_duckdb.write_to_file": {"name": "write_to_file", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.write_to_file", "macro_sql": "{% macro write_to_file(relation, location, options) -%}\n {% call statement('write_to_file') -%}\n copy {{ relation }} to '{{ location }}' ({{ options }})\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.54589, "supported_languages": null}, "macro.dbt_duckdb.register_glue_table": {"name": "register_glue_table", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.register_glue_table", "macro_sql": "{% macro register_glue_table(register, glue_database, relation, location, format) -%}\n {% if location.startswith(\"s3://\") and register == true %}\n {%- set column_list = adapter.get_columns_in_relation(relation) -%}\n {% do adapter.register_glue_table(glue_database, relation.identifier, column_list, location, format) %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.5462441, "supported_languages": null}, "macro.dbt_duckdb.render_write_options": {"name": "render_write_options", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.render_write_options", "macro_sql": "{% macro render_write_options(config) -%}\n {% set options = config.get('options', {}) %}\n {% for k in options %}\n {% if options[k] is string %}\n {% set _ = options.update({k: render(options[k])}) %}\n {% else %}\n {% set _ = options.update({k: render(options[k])}) %}\n {% endif %}\n {% endfor %}\n\n {# legacy top-level write options #}\n {% if config.get('format') %}\n {% set _ = options.update({'format': render(config.get('format'))}) %}\n {% endif %}\n {% if config.get('delimiter') %}\n {% set _ = options.update({'delimiter': render(config.get('delimiter'))}) %}\n {% endif %}\n\n {% do return(options) %}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.54719, "supported_languages": null}, "macro.dbt_duckdb.materialization_table_duckdb": {"name": "materialization_table_duckdb", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/materializations/table.sql", "original_file_path": "macros/materializations/table.sql", "unique_id": "macro.dbt_duckdb.materialization_table_duckdb", "macro_sql": "{% materialization table, adapter=\"duckdb\", supported_languages=['sql', 'python'] %}\n\n {%- set language = model['language'] -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already 
exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main', language=language) -%}\n {{- create_table_as(False, intermediate_relation, compiled_code, language) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.create_indexes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.549718, "supported_languages": ["sql", "python"]}, "macro.dbt_duckdb.materialization_external_duckdb": {"name": "materialization_external_duckdb", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/materializations/external.sql", "original_file_path": "macros/materializations/external.sql", "unique_id": "macro.dbt_duckdb.materialization_external_duckdb", "macro_sql": "{% materialization external, adapter=\"duckdb\", supported_languages=['sql', 'python'] %}\n\n {%- set location = render(config.get('location', default=external_location(this, config))) -%})\n {%- set rendered_options = render_write_options(config) -%}\n {%- set write_options = adapter.external_write_options(location, rendered_options) -%}\n {%- set read_location = adapter.external_read_location(location, rendered_options) -%}\n\n -- set language - python or sql\n {%- set language = model['language'] -%}\n\n {%- set 
target_relation = this.incorporate(type='view') %}\n\n -- Continue as normal materialization\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set temp_relation = make_intermediate_relation(this.incorporate(type='table'), suffix='__dbt_tmp') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation, suffix='__dbt_int') -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_temp_relation = load_cached_relation(temp_relation) -%}\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_temp_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('create_table', language=language) -%}\n {{- create_table_as(False, temp_relation, compiled_code, language) }}\n {%- endcall %}\n\n -- write an temp relation into file\n {{ write_to_file(temp_relation, location, write_options) }}\n -- create a view on top of the location\n {% call statement('main', language='sql') -%}\n create or replace view {{ intermediate_relation.include(database=adapter.use_database()) }} as (\n select * from '{{ read_location }}'\n );\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n {{ drop_relation_if_exists(temp_relation) }}\n\n -- register table into glue\n {%- set glue_register = config.get('glue_register', default=false) -%}\n {%- set glue_database = render(config.get('glue_database', default='default')) -%}\n {% do register_glue_table(glue_register, glue_database, target_relation, location, format) %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt_duckdb.external_location", "macro.dbt_duckdb.render_write_options", "macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", 
"macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt_duckdb.write_to_file", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt_duckdb.register_glue_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.55407, "supported_languages": ["sql", "python"]}, "macro.dbt_duckdb.materialization_incremental_duckdb": {"name": "materialization_incremental_duckdb", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/materializations/incremental.sql", "original_file_path": "macros/materializations/incremental.sql", "unique_id": "macro.dbt_duckdb.materialization_incremental_duckdb", "macro_sql": "{% materialization incremental, adapter=\"duckdb\", supported_languages=['sql', 'python'] -%}\n\n {%- set language = model['language'] -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = create_table_as(False, target_relation, compiled_code, language) %}\n {% elif full_refresh_mode %}\n {% set build_sql = create_table_as(False, intermediate_relation, compiled_code, language) %}\n {% set need_swap = true %}\n {% else %}\n {% if language == 'python' %}\n {% set build_python = create_table_as(False, temp_relation, compiled_code, language) %}\n {% call statement(\"pre\", language=language) %}\n {{- build_python }}\n {% endcall %}\n {% else %} {# SQL #}\n {% do run_query(create_table_as(True, temp_relation, compiled_code, language)) %}\n {% endif %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. 
Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n {% set language = \"sql\" %}\n\n {% endif %}\n\n {% call statement(\"main\", language=language) %}\n {{- build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_temp_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.create_table_as", "macro.dbt.statement", "macro.dbt.run_query", "macro.dbt.process_schema_changes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.559038, "supported_languages": ["sql", "python"]}, "macro.dbt_duckdb.duckdb__dateadd": {"name": "duckdb__dateadd", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt_duckdb.duckdb__dateadd", "macro_sql": "{% macro duckdb__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {{ from_date_or_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.5592582, "supported_languages": null}, "macro.dbt_duckdb.duckdb__listagg": {"name": "duckdb__listagg", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": 
"macro.dbt_duckdb.duckdb__listagg", "macro_sql": "{% macro duckdb__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n {% if limit_num -%}\n list_aggr(\n (array_agg(\n {{ measure }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n ))[1:{{ limit_num }}],\n 'string_agg',\n {{ delimiter_text }}\n )\n {%- else %}\n string_agg(\n {{ measure }},\n {{ delimiter_text }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n )\n {%- endif %}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.559789, "supported_languages": null}, "macro.dbt_duckdb.duckdb__datediff": {"name": "duckdb__datediff", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt_duckdb.duckdb__datediff", "macro_sql": "{% macro duckdb__datediff(first_date, second_date, datepart) -%}\n\n {% if datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif datepart == 'month' %}\n ({{ datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ datediff(first_date, second_date, 'day') }} / 7 + case\n when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', ({{first_date}})::timestamp))\n {% elif datepart == 'minute' %}\n ({{ datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.562541, "supported_languages": null}, "macro.dbt_duckdb.duckdb__any_value": {"name": "duckdb__any_value", "resource_type": "macro", "package_name": "dbt_duckdb", "path": 
"macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt_duckdb.duckdb__any_value", "macro_sql": "{% macro duckdb__any_value(expression) -%}\n\n arbitrary({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.562667, "supported_languages": null}, "macro.dbt_duckdb.register_upstream_external_models": {"name": "register_upstream_external_models", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/utils/upstream.sql", "original_file_path": "macros/utils/upstream.sql", "unique_id": "macro.dbt_duckdb.register_upstream_external_models", "macro_sql": "{%- macro register_upstream_external_models() -%}\n{% if execute %}\n{% set upstream_nodes = {} %}\n{% set upstream_schemas = {} %}\n{% for node in selected_resources %}\n {% for upstream_node in graph['nodes'][node]['depends_on']['nodes'] %}\n {% if upstream_node not in upstream_nodes and upstream_node not in selected_resources %}\n {% do upstream_nodes.update({upstream_node: None}) %}\n {% set upstream = graph['nodes'].get(upstream_node) %}\n {% if upstream\n and upstream.resource_type in ('model', 'seed')\n and upstream.config.materialized=='external'\n %}\n {%- set upstream_rel = api.Relation.create(\n database=upstream['database'],\n schema=upstream['schema'],\n identifier=upstream['alias']\n ) -%}\n {%- set location = upstream.config.get('location', external_location(upstream, upstream.config)) -%}\n {%- set rendered_options = render_write_options(config) -%}\n {%- set upstream_location = adapter.external_read_location(location, rendered_options) -%}\n {% if upstream_rel.schema not in upstream_schemas %}\n {% call statement('main', language='sql') -%}\n create schema if not exists {{ upstream_rel.schema }}\n {%- endcall %}\n {% do upstream_schemas.update({upstream_rel.schema: None}) %}\n {% endif %}\n {% call statement('main', language='sql') -%}\n create or replace view {{ upstream_rel.include(database=adapter.use_database()) }} as (\n select * from '{{ upstream_location }}'\n );\n {%- endcall %}\n {%- endif %}\n {% endif %}\n {% endfor %}\n{% endfor %}\n{% do adapter.commit() %}\n{% endif %}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_duckdb.external_location", "macro.dbt_duckdb.render_write_options", "macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.564817, "supported_languages": null}, "macro.dbt_duckdb.duckdb__split_part": {"name": "duckdb__split_part", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/utils/splitpart.sql", "original_file_path": "macros/utils/splitpart.sql", "unique_id": "macro.dbt_duckdb.duckdb__split_part", "macro_sql": "{% macro duckdb__split_part(string_text, delimiter_text, part_number) %}\n\n {% if part_number >= 0 %}\n coalesce(string_split({{ string_text }}, {{ delimiter_text }})[ {{ part_number }} ], '')\n {% else %}\n {{ dbt._split_part_negative(string_text, delimiter_text, part_number) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt._split_part_negative"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.565182, "supported_languages": null}, "macro.dbt_duckdb.duckdb__last_day": {"name": "duckdb__last_day", "resource_type": "macro", 
"package_name": "dbt_duckdb", "path": "macros/utils/lastday.sql", "original_file_path": "macros/utils/lastday.sql", "unique_id": "macro.dbt_duckdb.duckdb__last_day", "macro_sql": "{% macro duckdb__last_day(date, datepart) -%}\n\n {%- if datepart == 'quarter' -%}\n -- duckdb dateadd does not support quarter interval.\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd('month', '3', dbt.date_trunc(datepart, date))\n )}}\n as date)\n {%- else -%}\n {{dbt.default_last_day(date, datepart)}}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.565612, "supported_languages": null}, "macro.dbt_duckdb.external_location": {"name": "external_location", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/utils/external_location.sql", "original_file_path": "macros/utils/external_location.sql", "unique_id": "macro.dbt_duckdb.external_location", "macro_sql": "{%- macro external_location(relation, config) -%}\n {%- if config.get('options', {}).get('partition_by') is none -%}\n {%- set format = config.get('format', 'parquet') -%}\n {{- adapter.external_root() }}/{{ relation.identifier }}.{{ format }}\n {%- else -%}\n {{- adapter.external_root() }}/{{ relation.identifier }}\n {%- endif -%}\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.566093, "supported_languages": null}, "macro.dbt.run_hooks": {"name": "run_hooks", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.567016, "supported_languages": null}, "macro.dbt.make_hook_config": {"name": "make_hook_config", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.5671868, "supported_languages": null}, "macro.dbt.before_begin": {"name": "before_begin", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ 
make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.567308, "supported_languages": null}, "macro.dbt.in_transaction": {"name": "in_transaction", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.567431, "supported_languages": null}, "macro.dbt.after_commit": {"name": "after_commit", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.567552, "supported_languages": null}, "macro.dbt.set_sql_header": {"name": "set_sql_header", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.567872, "supported_languages": null}, "macro.dbt.should_full_refresh": {"name": "should_full_refresh", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.568131, "supported_languages": null}, "macro.dbt.should_store_failures": {"name": "should_store_failures", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.568392, 
"supported_languages": null}, "macro.dbt.snapshot_merge_sql": {"name": "snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.568773, "supported_languages": null}, "macro.dbt.default__snapshot_merge_sql": {"name": "default__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.569004, "supported_languages": null}, "macro.dbt.strategy_dispatch": {"name": "strategy_dispatch", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' 
in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.572018, "supported_languages": null}, "macro.dbt.snapshot_hash_arguments": {"name": "snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.572164, "supported_languages": null}, "macro.dbt.default__snapshot_hash_arguments": {"name": "default__snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.5723531, "supported_languages": null}, "macro.dbt.snapshot_timestamp_strategy": {"name": "snapshot_timestamp_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. 
We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.572973, "supported_languages": null}, "macro.dbt.snapshot_string_as_time": {"name": "snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.573117, "supported_languages": null}, "macro.dbt.default__snapshot_string_as_time": {"name": "default__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.573271, "supported_languages": null}, "macro.dbt.snapshot_check_all_get_existing_columns": {"name": "snapshot_check_all_get_existing_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) -%}\n {%- if not target_exists -%}\n {#-- no table yet -> return whatever the query does --#}\n {{ return((false, query_columns)) }}\n {%- endif -%}\n\n {#-- handle any schema changes --#}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=node.alias) -%}\n\n {% if check_cols_config == 'all' %}\n {%- set query_columns = get_columns_in_query(node['compiled_code']) -%}\n\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {#-- query for proper casing/quoting, to support comparison below --#}\n {%- set select_check_cols_from_target -%}\n select {{ check_cols_config | join(', ') }} from ({{ node['compiled_code'] }}) subq\n {%- 
endset -%}\n {% set query_columns = get_columns_in_query(select_check_cols_from_target) %}\n\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set existing_cols = adapter.get_columns_in_relation(target_relation) | map(attribute = 'name') | list -%}\n {%- set ns = namespace() -%} {#-- handle for-loop scoping with a namespace --#}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(adapter.quote(col)) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return((ns.column_added, intersection)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.574463, "supported_languages": null}, "macro.dbt.snapshot_check_strategy": {"name": "snapshot_check_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n {% set updated_at = config.get('updated_at', snapshot_get_time()) %}\n\n {% set column_added = false %}\n\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n {{ get_true_sql() }}\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.get_true_sql", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.5756218, "supported_languages": null}, "macro.dbt.create_columns": {"name": "create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1680025829.579034, "supported_languages": null}, "macro.dbt.default__create_columns": {"name": "default__create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.579274, "supported_languages": null}, "macro.dbt.post_snapshot": {"name": "post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.579415, "supported_languages": null}, "macro.dbt.default__post_snapshot": {"name": "default__post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.579489, "supported_languages": null}, "macro.dbt.get_true_sql": {"name": "get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.get_true_sql", "macro_sql": "{% macro get_true_sql() %}\n {{ adapter.dispatch('get_true_sql', 'dbt')() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_true_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.579612, "supported_languages": null}, "macro.dbt.default__get_true_sql": {"name": "default__get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__get_true_sql", "macro_sql": "{% macro default__get_true_sql() %}\n {{ return('TRUE') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.579707, "supported_languages": null}, "macro.dbt.snapshot_staging_table": {"name": "snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": 
"macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_staging_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.579885, "supported_languages": null}, "macro.dbt.default__snapshot_staging_table": {"name": "default__snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__snapshot_staging_table", "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n\n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n\n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.58063, "supported_languages": null}, 
"macro.dbt.build_snapshot_table": {"name": "build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__build_snapshot_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.580791, "supported_languages": null}, "macro.dbt.default__build_snapshot_table": {"name": "default__build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__build_snapshot_table", "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.581003, "supported_languages": null}, "macro.dbt.build_snapshot_staging_table": {"name": "build_snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set temp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, temp_relation, select) }}\n {% endcall %}\n\n {% do return(temp_relation) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.581364, "supported_languages": null}, "macro.dbt.materialization_snapshot_default": {"name": "materialization_snapshot_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "unique_id": "macro.dbt.materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do 
exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.5864599, "supported_languages": ["sql"]}, "macro.dbt.materialization_test_default": {"name": "materialization_test_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/test.sql", "original_file_path": "macros/materializations/tests/test.sql", "unique_id": "macro.dbt.materialization_test_default", "macro_sql": "{%- 
materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type='table') -%} %}\n\n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n {% call statement(auto_begin=True) %}\n {{ create_table_as(False, target_relation, sql) }}\n {% endcall %}\n\n {% do relations.append(target_relation) %}\n\n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n\n {{ adapter.commit() }}\n\n {% else %}\n\n {% set main_sql = sql %}\n\n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.5882041, "supported_languages": ["sql"]}, "macro.dbt.get_test_sql": {"name": "get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.588593, "supported_languages": null}, "macro.dbt.default__get_test_sql": {"name": "default__get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.588854, "supported_languages": null}, "macro.dbt.get_where_subquery": {"name": "get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.589171, "supported_languages": null}, "macro.dbt.default__get_where_subquery": {"name": "default__get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.default__get_where_subquery", "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.589488, "supported_languages": null}, "macro.dbt.get_quoted_csv": {"name": "get_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.5908349, "supported_languages": null}, "macro.dbt.diff_columns": {"name": "diff_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n\n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.591302, "supported_languages": null}, "macro.dbt.diff_column_data_types": {"name": "diff_column_data_types", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type and not sc.can_expand_to(other_column=tc) %}\n {{ result.append( { 'column_name': 
tc.name, 'new_type': sc.data_type } ) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.5918689, "supported_languages": null}, "macro.dbt.get_merge_update_columns": {"name": "get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_merge_update_columns", "macro_sql": "{% macro get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {{ return(adapter.dispatch('get_merge_update_columns', 'dbt')(merge_update_columns, merge_exclude_columns, dest_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.5920708, "supported_languages": null}, "macro.dbt.default__get_merge_update_columns": {"name": "default__get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.default__get_merge_update_columns", "macro_sql": "{% macro default__get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {%- set default_cols = dest_columns | map(attribute=\"quoted\") | list -%}\n\n {%- if merge_update_columns and merge_exclude_columns -%}\n {{ exceptions.raise_compiler_error(\n 'Model cannot specify merge_update_columns and merge_exclude_columns. 
Please update model to use only one config'\n )}}\n {%- elif merge_update_columns -%}\n {%- set update_columns = merge_update_columns -%}\n {%- elif merge_exclude_columns -%}\n {%- set update_columns = [] -%}\n {%- for column in dest_columns -%}\n {% if column.column | lower not in merge_exclude_columns | map(\"lower\") | list %}\n {%- do update_columns.append(column.quoted) -%}\n {% endif %}\n {%- endfor -%}\n {%- else -%}\n {%- set update_columns = default_cols -%}\n {%- endif -%}\n\n {{ return(update_columns) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.592672, "supported_languages": null}, "macro.dbt.get_merge_sql": {"name": "get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n -- back compat for old kwarg name\n {% set incremental_predicates = kwargs.get('predicates', incremental_predicates) %}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.597745, "supported_languages": null}, "macro.dbt.default__get_merge_sql": {"name": "default__get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n {%- set predicates = [] if incremental_predicates is none else [] + incremental_predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set merge_update_columns = config.get('merge_update_columns') -%}\n {%- set merge_exclude_columns = config.get('merge_exclude_columns') -%}\n {%- set update_columns = get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %}\n {% for key in unique_key %}\n {% set this_key_match %}\n DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }}\n {% endset %}\n {% do predicates.append(this_key_match) %}\n {% endfor %}\n {% else %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% endif %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{\"(\" ~ predicates | join(\") and (\") ~ \")\"}}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% 
endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv", "macro.dbt.get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.599158, "supported_languages": null}, "macro.dbt.get_delete_insert_merge_sql": {"name": "get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.599381, "supported_languages": null}, "macro.dbt.default__get_delete_insert_merge_sql": {"name": "default__get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not string %}\n delete from {{target }}\n using {{ source }}\n where (\n {% for key in unique_key %}\n {{ source }}.{{ key }} = {{ target }}.{{ key }}\n {{ \"and \" if not loop.last}}\n {% endfor %}\n {% if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {% endif %}\n );\n {% else %}\n delete from {{ target }}\n where (\n {{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n )\n {%- if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {%- endif -%};\n\n {% endif %}\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.600228, "supported_languages": null}, "macro.dbt.get_insert_overwrite_merge_sql": {"name": "get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "depends_on": {"macros": 
["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.600458, "supported_languages": null}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"name": "default__get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {#-- The only time include_sql_header is True: --#}\n {#-- BigQuery + insert_overwrite strategy + \"static\" partitions config --#}\n {#-- We should consider including the sql header at the materialization level instead --#}\n\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6010041, "supported_languages": null}, "macro.dbt.is_incremental": {"name": "is_incremental", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "unique_id": "macro.dbt.is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.60154, "supported_languages": null}, "macro.dbt.get_incremental_append_sql": {"name": "get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_append_sql", "macro_sql": "{% macro get_incremental_append_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_append_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.602304, "supported_languages": null}, 
"macro.dbt.default__get_incremental_append_sql": {"name": "default__get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_append_sql", "macro_sql": "{% macro default__get_incremental_append_sql(arg_dict) %}\n\n {% do return(get_insert_into_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_into_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.602527, "supported_languages": null}, "macro.dbt.get_incremental_delete_insert_sql": {"name": "get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_delete_insert_sql", "macro_sql": "{% macro get_incremental_delete_insert_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_delete_insert_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__get_incremental_delete_insert_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6026812, "supported_languages": null}, "macro.dbt.default__get_incremental_delete_insert_sql": {"name": "default__get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_delete_insert_sql", "macro_sql": "{% macro default__get_incremental_delete_insert_sql(arg_dict) %}\n\n {% do return(get_delete_insert_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.602931, "supported_languages": null}, "macro.dbt.get_incremental_merge_sql": {"name": "get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_merge_sql", "macro_sql": "{% macro get_incremental_merge_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_merge_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6030898, "supported_languages": null}, "macro.dbt.default__get_incremental_merge_sql": {"name": "default__get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": 
"macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_merge_sql", "macro_sql": "{% macro default__get_incremental_merge_sql(arg_dict) %}\n\n {% do return(get_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6033401, "supported_languages": null}, "macro.dbt.get_incremental_insert_overwrite_sql": {"name": "get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_insert_overwrite_sql", "macro_sql": "{% macro get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_insert_overwrite_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_insert_overwrite_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.603498, "supported_languages": null}, "macro.dbt.default__get_incremental_insert_overwrite_sql": {"name": "default__get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_insert_overwrite_sql", "macro_sql": "{% macro default__get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {% do return(get_insert_overwrite_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.603723, "supported_languages": null}, "macro.dbt.get_incremental_default_sql": {"name": "get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_default_sql", "macro_sql": "{% macro get_incremental_default_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_default_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__get_incremental_default_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6038861, "supported_languages": null}, "macro.dbt.default__get_incremental_default_sql": {"name": "default__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_default_sql", "macro_sql": "{% macro default__get_incremental_default_sql(arg_dict) 
%}\n\n {% do return(get_incremental_append_sql(arg_dict)) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.604013, "supported_languages": null}, "macro.dbt.get_insert_into_sql": {"name": "get_insert_into_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_insert_into_sql", "macro_sql": "{% macro get_insert_into_sql(target_relation, temp_relation, dest_columns) %}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ temp_relation }}\n )\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.604246, "supported_languages": null}, "macro.dbt.materialization_incremental_default": {"name": "materialization_incremental_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "unique_id": "macro.dbt.materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. 
This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_temp_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.get_create_table_as_sql", "macro.dbt.run_query", "macro.dbt.process_schema_changes", 
"macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6085372, "supported_languages": ["sql"]}, "macro.dbt.incremental_validate_on_schema_change": {"name": "incremental_validate_on_schema_change", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n\n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n\n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n\n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.613145, "supported_languages": null}, "macro.dbt.check_for_schema_changes": {"name": "check_for_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n\n {% set schema_changed = False %}\n\n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n\n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n\n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.614183, "supported_languages": null}, "macro.dbt.sync_column_schemas": {"name": "sync_column_schemas", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n\n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n\n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n\n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n\n {% do log(schema_change_message) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.615208, "supported_languages": null}, "macro.dbt.process_schema_changes": {"name": "process_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n\n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n\n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n\n {% if schema_changes_dict['schema_changed'] %}\n\n {% if on_schema_change == 'fail' %}\n\n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways:\n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n\n Additional troubleshooting context:\n Source columns not in target: {{ schema_changes_dict['source_not_in_target'] }}\n Target columns not in source: {{ schema_changes_dict['target_not_in_source'] }}\n New column types: {{ schema_changes_dict['new_target_types'] }}\n {% endset %}\n\n {% do exceptions.raise_compiler_error(fail_msg) %}\n\n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n\n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {% endif %}\n\n {% endif %}\n\n {{ 
return(schema_changes_dict['source_columns']) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.615935, "supported_languages": null}, "macro.dbt.materialization_table_default": {"name": "materialization_table_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/table.sql", "original_file_path": "macros/materializations/models/table/table.sql", "unique_id": "macro.dbt.materialization_table_default", "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, 
"patch_path": null, "arguments": [], "created_at": 1680025829.6181371, "supported_languages": ["sql"]}, "macro.dbt.get_create_table_as_sql": {"name": "get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.get_create_table_as_sql", "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_table_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.618577, "supported_languages": null}, "macro.dbt.default__get_create_table_as_sql": {"name": "default__get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.default__get_create_table_as_sql", "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.618732, "supported_languages": null}, "macro.dbt.create_table_as": {"name": "create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, compiled_code, language='sql') -%}\n {# backward compatibility for create_table_as that does not support language #}\n {% if language == \"sql\" %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code)}}\n {% else %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code, language) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.619096, "supported_languages": null}, "macro.dbt.default__create_table_as": {"name": "default__create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.61945, "supported_languages": null}, "macro.dbt.materialization_view_default": {"name": "materialization_view_default", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/view.sql", "original_file_path": "macros/materializations/models/view/view.sql", "unique_id": "macro.dbt.materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": 
"", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.62165, "supported_languages": ["sql"]}, "macro.dbt.handle_existing_table": {"name": "handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "unique_id": "macro.dbt.handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.621908, "supported_languages": null}, "macro.dbt.default__handle_existing_table": {"name": "default__handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "unique_id": "macro.dbt.default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6220968, "supported_languages": null}, "macro.dbt.create_or_replace_view": {"name": "create_or_replace_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/models/view/create_or_replace_view.sql", "unique_id": "macro.dbt.create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. 
This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=True) %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.623378, "supported_languages": null}, "macro.dbt.get_create_view_as_sql": {"name": "get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.623689, "supported_languages": null}, "macro.dbt.default__get_create_view_as_sql": {"name": "default__get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.623824, "supported_languages": null}, "macro.dbt.create_view_as": {"name": "create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.623976, "supported_languages": null}, "macro.dbt.default__create_view_as": {"name": "default__create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = 
config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }} as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.624196, "supported_languages": null}, "macro.dbt.materialization_seed_default": {"name": "materialization_seed_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "unique_id": "macro.dbt.materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparision later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.get_csv_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.627002, "supported_languages": ["sql"]}, "macro.dbt.create_csv_table": {"name": "create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": 
"macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.631079, "supported_languages": null}, "macro.dbt.default__create_csv_table": {"name": "default__create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.63184, "supported_languages": null}, "macro.dbt.reset_csv_table": {"name": "reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.632041, "supported_languages": null}, "macro.dbt.default__reset_csv_table": {"name": "default__reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.632442, "supported_languages": null}, "macro.dbt.get_csv_sql": {"name": "get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_csv_sql", "macro_sql": "{% macro get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ adapter.dispatch('get_csv_sql', 'dbt')(create_or_truncate_sql, insert_sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_csv_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.632604, "supported_languages": null}, "macro.dbt.default__get_csv_sql": {"name": "default__get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_csv_sql", "macro_sql": "{% macro default__get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ create_or_truncate_sql }};\n -- dbt seed --\n {{ insert_sql }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.632715, "supported_languages": null}, "macro.dbt.get_binding_char": {"name": "get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6328309, "supported_languages": null}, "macro.dbt.default__get_binding_char": {"name": "default__get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.632926, "supported_languages": null}, "macro.dbt.get_batch_size": {"name": "get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.633058, "supported_languages": null}, "macro.dbt.default__get_batch_size": {"name": "default__get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": 
true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.633155, "supported_languages": null}, "macro.dbt.get_seed_column_quoted_csv": {"name": "get_seed_column_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.633557, "supported_languages": null}, "macro.dbt.load_csv_rows": {"name": "load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6337159, "supported_languages": null}, "macro.dbt.default__load_csv_rows": {"name": "default__load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.634801, "supported_languages": null}, "macro.dbt.generate_alias_name": {"name": "generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": 
"macro.dbt.generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.635134, "supported_languages": null}, "macro.dbt.default__generate_alias_name": {"name": "default__generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name is none -%}\n\n {{ node.name }}\n\n {%- else -%}\n\n {{ custom_alias_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6353269, "supported_languages": null}, "macro.dbt.generate_schema_name": {"name": "generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.635782, "supported_languages": null}, "macro.dbt.default__generate_schema_name": {"name": "default__generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.636002, "supported_languages": null}, "macro.dbt.generate_schema_name_for_env": {"name": "generate_schema_name_for_env", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": 
[], "created_at": 1680025829.636239, "supported_languages": null}, "macro.dbt.generate_database_name": {"name": "generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.636579, "supported_languages": null}, "macro.dbt.default__generate_database_name": {"name": "default__generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.636798, "supported_languages": null}, "macro.dbt.default__test_relationships": {"name": "default__test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "unique_id": "macro.dbt.default__test_relationships", "macro_sql": "{% macro default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6371, "supported_languages": null}, "macro.dbt.default__test_not_null": {"name": "default__test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "unique_id": "macro.dbt.default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else column_name %}\n\nselect {{ column_list }}\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.637353, "supported_languages": null}, "macro.dbt.default__test_unique": {"name": "default__test_unique", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/unique.sql", "original_file_path": 
"macros/generic_test_sql/unique.sql", "unique_id": "macro.dbt.default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.637565, "supported_languages": null}, "macro.dbt.default__test_accepted_values": {"name": "default__test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "unique_id": "macro.dbt.default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6380422, "supported_languages": null}, "macro.dbt.statement": {"name": "statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.statement", "macro_sql": "\n{%- macro statement(name=None, fetch_result=False, auto_begin=True, language='sql') -%}\n {%- if execute: -%}\n {%- set compiled_code = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime {} for node \"{}\"'.format(language, model['unique_id'])) }}\n {{ write(compiled_code) }}\n {%- endif -%}\n {%- if language == 'sql'-%}\n {%- set res, table = adapter.execute(compiled_code, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- elif language == 'python' -%}\n {%- set res = submit_python_job(model, compiled_code) -%}\n {#-- TODO: What should table be for python models? 
--#}\n {%- set table = None -%}\n {%- else -%}\n {% do exceptions.raise_compiler_error(\"statement macro didn't get supported language\") %}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.639293, "supported_languages": null}, "macro.dbt.noop_statement": {"name": "noop_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6397882, "supported_languages": null}, "macro.dbt.run_query": {"name": "run_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.640039, "supported_languages": null}, "macro.dbt.convert_datetime": {"name": "convert_datetime", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.641572, "supported_languages": null}, "macro.dbt.dates_in_range": {"name": "dates_in_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partiton start date 
is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.642743, "supported_languages": null}, "macro.dbt.partition_range": {"name": "partition_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6433878, "supported_languages": null}, "macro.dbt.py_current_timestring": {"name": "py_current_timestring", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6435862, "supported_languages": null}, "macro.dbt.except": {"name": "except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.except", "macro_sql": "{% macro except() %}\n {{ return(adapter.dispatch('except', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6437812, "supported_languages": null}, "macro.dbt.default__except": {"name": "default__except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.default__except", "macro_sql": "{% macro default__except() %}\n\n except\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.643842, "supported_languages": null}, "macro.dbt.replace": 
{"name": "replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.replace", "macro_sql": "{% macro replace(field, old_chars, new_chars) -%}\n {{ return(adapter.dispatch('replace', 'dbt') (field, old_chars, new_chars)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__replace"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6441212, "supported_languages": null}, "macro.dbt.default__replace": {"name": "default__replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.default__replace", "macro_sql": "{% macro default__replace(field, old_chars, new_chars) %}\n\n replace(\n {{ field }},\n {{ old_chars }},\n {{ new_chars }}\n )\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.644257, "supported_languages": null}, "macro.dbt.concat": {"name": "concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.concat", "macro_sql": "{% macro concat(fields) -%}\n {{ return(adapter.dispatch('concat', 'dbt')(fields)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.644465, "supported_languages": null}, "macro.dbt.default__concat": {"name": "default__concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.default__concat", "macro_sql": "{% macro default__concat(fields) -%}\n {{ fields|join(' || ') }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.64457, "supported_languages": null}, "macro.dbt.length": {"name": "length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.length", "macro_sql": "{% macro length(expression) -%}\n {{ return(adapter.dispatch('length', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__length"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.644785, "supported_languages": null}, "macro.dbt.default__length": {"name": "default__length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.default__length", "macro_sql": "{% macro default__length(expression) %}\n\n length(\n {{ expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.644872, "supported_languages": null}, "macro.dbt.dateadd": {"name": "dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": 
"macros/utils/dateadd.sql", "unique_id": "macro.dbt.dateadd", "macro_sql": "{% macro dateadd(datepart, interval, from_date_or_timestamp) %}\n {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6451578, "supported_languages": null}, "macro.dbt.default__dateadd": {"name": "default__dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.default__dateadd", "macro_sql": "{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_date_or_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.645298, "supported_languages": null}, "macro.dbt.intersect": {"name": "intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.intersect", "macro_sql": "{% macro intersect() %}\n {{ return(adapter.dispatch('intersect', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__intersect"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.64549, "supported_languages": null}, "macro.dbt.default__intersect": {"name": "default__intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.default__intersect", "macro_sql": "{% macro default__intersect() %}\n\n intersect\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6455529, "supported_languages": null}, "macro.dbt.escape_single_quotes": {"name": "escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.escape_single_quotes", "macro_sql": "{% macro escape_single_quotes(expression) %}\n {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__escape_single_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.64578, "supported_languages": null}, "macro.dbt.default__escape_single_quotes": {"name": "default__escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.default__escape_single_quotes", "macro_sql": "{% macro default__escape_single_quotes(expression) -%}\n{{ expression | replace(\"'\",\"''\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.645895, "supported_languages": null}, "macro.dbt.right": 
{"name": "right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.right", "macro_sql": "{% macro right(string_text, length_expression) -%}\n {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__right"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.646142, "supported_languages": null}, "macro.dbt.default__right": {"name": "default__right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.default__right", "macro_sql": "{% macro default__right(string_text, length_expression) %}\n\n right(\n {{ string_text }},\n {{ length_expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.646257, "supported_languages": null}, "macro.dbt.listagg": {"name": "listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.listagg", "macro_sql": "{% macro listagg(measure, delimiter_text=\"','\", order_by_clause=none, limit_num=none) -%}\n {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__listagg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.646784, "supported_languages": null}, "macro.dbt.default__listagg": {"name": "default__listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.default__listagg", "macro_sql": "{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n array_slice(\n array_agg(\n {{ measure }}\n ){% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n ,0\n ,{{ limit_num }}\n ),\n {{ delimiter_text }}\n )\n {%- else %}\n listagg(\n {{ measure }},\n {{ delimiter_text }}\n )\n {% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6681972, "supported_languages": null}, "macro.dbt.datediff": {"name": "datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.datediff", "macro_sql": "{% macro datediff(first_date, second_date, datepart) %}\n {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.668546, "supported_languages": null}, "macro.dbt.default__datediff": {"name": "default__datediff", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.default__datediff", "macro_sql": "{% macro default__datediff(first_date, second_date, datepart) -%}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.668691, "supported_languages": null}, "macro.dbt.safe_cast": {"name": "safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.safe_cast", "macro_sql": "{% macro safe_cast(field, type) %}\n {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.668951, "supported_languages": null}, "macro.dbt.default__safe_cast": {"name": "default__safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.default__safe_cast", "macro_sql": "{% macro default__safe_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6690712, "supported_languages": null}, "macro.dbt.hash": {"name": "hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.hash", "macro_sql": "{% macro hash(field) -%}\n {{ return(adapter.dispatch('hash', 'dbt') (field)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__hash"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.669299, "supported_languages": null}, "macro.dbt.default__hash": {"name": "default__hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.default__hash", "macro_sql": "{% macro default__hash(field) -%}\n md5(cast({{ field }} as {{ api.Column.translate_type('string') }}))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.669434, "supported_languages": null}, "macro.dbt.cast_bool_to_text": {"name": "cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.cast_bool_to_text", "macro_sql": "{% macro cast_bool_to_text(field) %}\n {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6696472, "supported_languages": null}, "macro.dbt.default__cast_bool_to_text": {"name": "default__cast_bool_to_text", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.default__cast_bool_to_text", "macro_sql": "{% macro default__cast_bool_to_text(field) %}\n cast({{ field }} as {{ api.Column.translate_type('string') }})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.669782, "supported_languages": null}, "macro.dbt.any_value": {"name": "any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.any_value", "macro_sql": "{% macro any_value(expression) -%}\n {{ return(adapter.dispatch('any_value', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__any_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.669991, "supported_languages": null}, "macro.dbt.default__any_value": {"name": "default__any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.default__any_value", "macro_sql": "{% macro default__any_value(expression) -%}\n\n any_value({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.670079, "supported_languages": null}, "macro.dbt.position": {"name": "position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.position", "macro_sql": "{% macro position(substring_text, string_text) -%}\n {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__position"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6703281, "supported_languages": null}, "macro.dbt.default__position": {"name": "default__position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.default__position", "macro_sql": "{% macro default__position(substring_text, string_text) %}\n\n position(\n {{ substring_text }} in {{ string_text }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.670441, "supported_languages": null}, "macro.dbt.string_literal": {"name": "string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.string_literal", "macro_sql": "{%- macro string_literal(value) -%}\n {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.670652, "supported_languages": null}, 
"macro.dbt.default__string_literal": {"name": "default__string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.default__string_literal", "macro_sql": "{% macro default__string_literal(value) -%}\n '{{ value }}'\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.670739, "supported_languages": null}, "macro.dbt.type_string": {"name": "type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_string", "macro_sql": "\n\n{%- macro type_string() -%}\n {{ return(adapter.dispatch('type_string', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.671568, "supported_languages": null}, "macro.dbt.default__type_string": {"name": "default__type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_string", "macro_sql": "{% macro default__type_string() %}\n {{ return(api.Column.translate_type(\"string\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6716979, "supported_languages": null}, "macro.dbt.type_timestamp": {"name": "type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_timestamp", "macro_sql": "\n\n{%- macro type_timestamp() -%}\n {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.671838, "supported_languages": null}, "macro.dbt.default__type_timestamp": {"name": "default__type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_timestamp", "macro_sql": "{% macro default__type_timestamp() %}\n {{ return(api.Column.translate_type(\"timestamp\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6720278, "supported_languages": null}, "macro.dbt.type_float": {"name": "type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_float", "macro_sql": "\n\n{%- macro type_float() -%}\n {{ return(adapter.dispatch('type_float', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_float"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6721652, "supported_languages": null}, 
"macro.dbt.default__type_float": {"name": "default__type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_float", "macro_sql": "{% macro default__type_float() %}\n {{ return(api.Column.translate_type(\"float\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.672292, "supported_languages": null}, "macro.dbt.type_numeric": {"name": "type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_numeric", "macro_sql": "\n\n{%- macro type_numeric() -%}\n {{ return(adapter.dispatch('type_numeric', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.672431, "supported_languages": null}, "macro.dbt.default__type_numeric": {"name": "default__type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_numeric", "macro_sql": "{% macro default__type_numeric() %}\n {{ return(api.Column.numeric_type(\"numeric\", 28, 6)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.672582, "supported_languages": null}, "macro.dbt.type_bigint": {"name": "type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_bigint", "macro_sql": "\n\n{%- macro type_bigint() -%}\n {{ return(adapter.dispatch('type_bigint', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.672719, "supported_languages": null}, "macro.dbt.default__type_bigint": {"name": "default__type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_bigint", "macro_sql": "{% macro default__type_bigint() %}\n {{ return(api.Column.translate_type(\"bigint\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.672846, "supported_languages": null}, "macro.dbt.type_int": {"name": "type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_int", "macro_sql": "\n\n{%- macro type_int() -%}\n {{ return(adapter.dispatch('type_int', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.672983, "supported_languages": null}, "macro.dbt.default__type_int": {"name": 
"default__type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_int", "macro_sql": "{%- macro default__type_int() -%}\n {{ return(api.Column.translate_type(\"integer\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.673106, "supported_languages": null}, "macro.dbt.type_boolean": {"name": "type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_boolean", "macro_sql": "\n\n{%- macro type_boolean() -%}\n {{ return(adapter.dispatch('type_boolean', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_boolean"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.673244, "supported_languages": null}, "macro.dbt.default__type_boolean": {"name": "default__type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_boolean", "macro_sql": "{%- macro default__type_boolean() -%}\n {{ return(api.Column.translate_type(\"boolean\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.673367, "supported_languages": null}, "macro.dbt.array_concat": {"name": "array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.array_concat", "macro_sql": "{% macro array_concat(array_1, array_2) -%}\n {{ return(adapter.dispatch('array_concat', 'dbt')(array_1, array_2)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.673606, "supported_languages": null}, "macro.dbt.default__array_concat": {"name": "default__array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.default__array_concat", "macro_sql": "{% macro default__array_concat(array_1, array_2) -%}\n array_cat({{ array_1 }}, {{ array_2 }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.673718, "supported_languages": null}, "macro.dbt.bool_or": {"name": "bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.bool_or", "macro_sql": "{% macro bool_or(expression) -%}\n {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__bool_or"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.673928, "supported_languages": null}, "macro.dbt.default__bool_or": 
{"name": "default__bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.default__bool_or", "macro_sql": "{% macro default__bool_or(expression) -%}\n\n bool_or({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.674016, "supported_languages": null}, "macro.dbt.last_day": {"name": "last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.last_day", "macro_sql": "{% macro last_day(date, datepart) %}\n {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.674309, "supported_languages": null}, "macro.dbt.default_last_day": {"name": "default_last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default_last_day", "macro_sql": "\n\n{%- macro default_last_day(date, datepart) -%}\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd(datepart, '1', dbt.date_trunc(datepart, date))\n )}}\n as date)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.674582, "supported_languages": null}, "macro.dbt.default__last_day": {"name": "default__last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default__last_day", "macro_sql": "{% macro default__last_day(date, datepart) -%}\n {{dbt.default_last_day(date, datepart)}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.674709, "supported_languages": null}, "macro.dbt.split_part": {"name": "split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.split_part", "macro_sql": "{% macro split_part(string_text, delimiter_text, part_number) %}\n {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.675154, "supported_languages": null}, "macro.dbt.default__split_part": {"name": "default__split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.default__split_part", "macro_sql": "{% macro default__split_part(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": 
"", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.675293, "supported_languages": null}, "macro.dbt._split_part_negative": {"name": "_split_part_negative", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt._split_part_negative", "macro_sql": "{% macro _split_part_negative(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n length({{ string_text }})\n - length(\n replace({{ string_text }}, {{ delimiter_text }}, '')\n ) + 2 {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.67554, "supported_languages": null}, "macro.dbt.date_trunc": {"name": "date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.date_trunc", "macro_sql": "{% macro date_trunc(datepart, date) -%}\n {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.675781, "supported_languages": null}, "macro.dbt.default__date_trunc": {"name": "default__date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.default__date_trunc", "macro_sql": "{% macro default__date_trunc(datepart, date) -%}\n date_trunc('{{datepart}}', {{date}})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.67589, "supported_languages": null}, "macro.dbt.array_construct": {"name": "array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.array_construct", "macro_sql": "{% macro array_construct(inputs=[], data_type=api.Column.translate_type('integer')) -%}\n {{ return(adapter.dispatch('array_construct', 'dbt')(inputs, data_type)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_construct"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.676214, "supported_languages": null}, "macro.dbt.default__array_construct": {"name": "default__array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.default__array_construct", "macro_sql": "{% macro default__array_construct(inputs, data_type) -%}\n {% if inputs|length > 0 %}\n array[ {{ inputs|join(' , ') }} ]\n {% else %}\n array[]::{{data_type}}[]\n {% endif %}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.676425, "supported_languages": null}, "macro.dbt.array_append": {"name": "array_append", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.array_append", "macro_sql": "{% macro array_append(array, new_element) -%}\n {{ return(adapter.dispatch('array_append', 'dbt')(array, new_element)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_append"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.676668, "supported_languages": null}, "macro.dbt.default__array_append": {"name": "default__array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.default__array_append", "macro_sql": "{% macro default__array_append(array, new_element) -%}\n array_append({{ array }}, {{ new_element }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.676778, "supported_languages": null}, "macro.dbt.create_schema": {"name": "create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6771042, "supported_languages": null}, "macro.dbt.default__create_schema": {"name": "default__create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.677264, "supported_languages": null}, "macro.dbt.drop_schema": {"name": "drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.677402, "supported_languages": null}, "macro.dbt.default__drop_schema": {"name": "default__drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.677574, "supported_languages": null}, "macro.dbt.current_timestamp": {"name": "current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp", "macro_sql": "{%- macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.677997, "supported_languages": null}, "macro.dbt.default__current_timestamp": {"name": "default__current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter ' + adapter.type()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6781251, "supported_languages": null}, "macro.dbt.snapshot_get_time": {"name": "snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.snapshot_get_time", "macro_sql": "\n\n{%- macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.678248, "supported_languages": null}, "macro.dbt.default__snapshot_get_time": {"name": "default__snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() %}\n {{ current_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.678338, "supported_languages": null}, "macro.dbt.current_timestamp_backcompat": {"name": "current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_backcompat", "macro_sql": "{% macro current_timestamp_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.678478, "supported_languages": null}, "macro.dbt.default__current_timestamp_backcompat": {"name": "default__current_timestamp_backcompat", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_backcompat", "macro_sql": "{% macro default__current_timestamp_backcompat() %}\n current_timestamp::timestamp\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.678539, "supported_languages": null}, "macro.dbt.current_timestamp_in_utc_backcompat": {"name": "current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_in_utc_backcompat", "macro_sql": "{% macro current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_in_utc_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__current_timestamp_in_utc_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6786802, "supported_languages": null}, "macro.dbt.default__current_timestamp_in_utc_backcompat": {"name": "default__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro default__current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt.default__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.678824, "supported_languages": null}, "macro.dbt.get_create_index_sql": {"name": "get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6792178, "supported_languages": null}, "macro.dbt.default__get_create_index_sql": {"name": "default__get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.679399, "supported_languages": null}, "macro.dbt.create_indexes": {"name": "create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.create_indexes", 
"macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.679533, "supported_languages": null}, "macro.dbt.default__create_indexes": {"name": "default__create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.679878, "supported_languages": null}, "macro.dbt.make_intermediate_relation": {"name": "make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_intermediate_relation", "macro_sql": "{% macro make_intermediate_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_intermediate_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_intermediate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.682687, "supported_languages": null}, "macro.dbt.default__make_intermediate_relation": {"name": "default__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_intermediate_relation", "macro_sql": "{% macro default__make_intermediate_relation(base_relation, suffix) %}\n {{ return(default__make_temp_relation(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.682827, "supported_languages": null}, "macro.dbt.make_temp_relation": {"name": "make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.683017, "supported_languages": null}, "macro.dbt.default__make_temp_relation": {"name": "default__make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", 
"original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {%- set temp_identifier = base_relation.identifier ~ suffix -%}\n {%- set temp_relation = base_relation.incorporate(\n path={\"identifier\": temp_identifier}) -%}\n\n {{ return(temp_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.683267, "supported_languages": null}, "macro.dbt.make_backup_relation": {"name": "make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_backup_relation", "macro_sql": "{% macro make_backup_relation(base_relation, backup_relation_type, suffix='__dbt_backup') %}\n {{ return(adapter.dispatch('make_backup_relation', 'dbt')(base_relation, backup_relation_type, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_backup_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.68348, "supported_languages": null}, "macro.dbt.default__make_backup_relation": {"name": "default__make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_backup_relation", "macro_sql": "{% macro default__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {%- set backup_identifier = base_relation.identifier ~ suffix -%}\n {%- set backup_relation = base_relation.incorporate(\n path={\"identifier\": backup_identifier},\n type=backup_relation_type\n ) -%}\n {{ return(backup_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.683755, "supported_languages": null}, "macro.dbt.drop_relation": {"name": "drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.683913, "supported_languages": null}, "macro.dbt.default__drop_relation": {"name": "default__drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.684098, "supported_languages": null}, "macro.dbt.truncate_relation": {"name": 
"truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.684253, "supported_languages": null}, "macro.dbt.default__truncate_relation": {"name": "default__truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6843882, "supported_languages": null}, "macro.dbt.rename_relation": {"name": "rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.684561, "supported_languages": null}, "macro.dbt.default__rename_relation": {"name": "default__rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.684798, "supported_languages": null}, "macro.dbt.get_or_create_relation": {"name": "get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.685013, "supported_languages": null}, "macro.dbt.default__get_or_create_relation": {"name": 
"default__get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.685487, "supported_languages": null}, "macro.dbt.load_cached_relation": {"name": "load_cached_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_cached_relation", "macro_sql": "{% macro load_cached_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.68575, "supported_languages": null}, "macro.dbt.load_relation": {"name": "load_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(load_cached_relation(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6858728, "supported_languages": null}, "macro.dbt.drop_relation_if_exists": {"name": "drop_relation_if_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6860359, "supported_languages": null}, "macro.dbt.collect_freshness": {"name": "collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.686399, "supported_languages": null}, "macro.dbt.default__collect_freshness": {"name": 
"default__collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.686757, "supported_languages": null}, "macro.dbt.copy_grants": {"name": "copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.copy_grants", "macro_sql": "{% macro copy_grants() %}\n {{ return(adapter.dispatch('copy_grants', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.688231, "supported_languages": null}, "macro.dbt.default__copy_grants": {"name": "default__copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__copy_grants", "macro_sql": "{% macro default__copy_grants() %}\n {{ return(True) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.688329, "supported_languages": null}, "macro.dbt.support_multiple_grantees_per_dcl_statement": {"name": "support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.support_multiple_grantees_per_dcl_statement", "macro_sql": "{% macro support_multiple_grantees_per_dcl_statement() %}\n {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.688475, "supported_languages": null}, "macro.dbt.default__support_multiple_grantees_per_dcl_statement": {"name": "default__support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__support_multiple_grantees_per_dcl_statement", "macro_sql": "\n\n{%- macro default__support_multiple_grantees_per_dcl_statement() -%}\n {{ return(True) }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.688568, "supported_languages": null}, 
"macro.dbt.should_revoke": {"name": "should_revoke", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.should_revoke", "macro_sql": "{% macro should_revoke(existing_relation, full_refresh_mode=True) %}\n\n {% if not existing_relation %}\n {#-- The table doesn't already exist, so no grants to copy over --#}\n {{ return(False) }}\n {% elif full_refresh_mode %}\n {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#}\n {{ return(copy_grants()) }}\n {% else %}\n {#-- The table is being merged/upserted/inserted -- grants will be carried over --#}\n {{ return(True) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.688861, "supported_languages": null}, "macro.dbt.get_show_grant_sql": {"name": "get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_show_grant_sql", "macro_sql": "{% macro get_show_grant_sql(relation) %}\n {{ return(adapter.dispatch(\"get_show_grant_sql\", \"dbt\")(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_show_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.689023, "supported_languages": null}, "macro.dbt.default__get_show_grant_sql": {"name": "default__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_show_grant_sql", "macro_sql": "{% macro default__get_show_grant_sql(relation) %}\n show grants on {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.689109, "supported_languages": null}, "macro.dbt.get_grant_sql": {"name": "get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_grant_sql", "macro_sql": "{% macro get_grant_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.689307, "supported_languages": null}, "macro.dbt.default__get_grant_sql": {"name": "default__get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_grant_sql", "macro_sql": "\n\n{%- macro default__get_grant_sql(relation, privilege, grantees) -%}\n grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1680025829.689466, "supported_languages": null}, "macro.dbt.get_revoke_sql": {"name": "get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_revoke_sql", "macro_sql": "{% macro get_revoke_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_revoke_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.689657, "supported_languages": null}, "macro.dbt.default__get_revoke_sql": {"name": "default__get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_revoke_sql", "macro_sql": "\n\n{%- macro default__get_revoke_sql(relation, privilege, grantees) -%}\n revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6898172, "supported_languages": null}, "macro.dbt.get_dcl_statement_list": {"name": "get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_dcl_statement_list", "macro_sql": "{% macro get_dcl_statement_list(relation, grant_config, get_dcl_macro) %}\n {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_dcl_statement_list"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.690012, "supported_languages": null}, "macro.dbt.default__get_dcl_statement_list": {"name": "default__get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_dcl_statement_list", "macro_sql": "\n\n{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%}\n {#\n -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked.\n -- Depending on whether this database supports multiple grantees per statement, pass in the list of\n -- all grantees per privilege, or (if not) template one statement per privilege-grantee pair.\n -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql`\n #}\n {%- set dcl_statements = [] -%}\n {%- for privilege, grantees in grant_config.items() %}\n {%- if support_multiple_grantees_per_dcl_statement() and grantees -%}\n {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%}\n {%- do dcl_statements.append(dcl) -%}\n {%- else -%}\n {%- for grantee in grantees -%}\n {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %}\n {%- do dcl_statements.append(dcl) -%}\n {% endfor -%}\n {%- endif -%}\n {%- endfor -%}\n {{ return(dcl_statements) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": 
true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.690586, "supported_languages": null}, "macro.dbt.call_dcl_statements": {"name": "call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.call_dcl_statements", "macro_sql": "{% macro call_dcl_statements(dcl_statement_list) %}\n {{ return(adapter.dispatch(\"call_dcl_statements\", \"dbt\")(dcl_statement_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.690748, "supported_languages": null}, "macro.dbt.default__call_dcl_statements": {"name": "default__call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__call_dcl_statements", "macro_sql": "{% macro default__call_dcl_statements(dcl_statement_list) %}\n {#\n -- By default, supply all grant + revoke statements in a single semicolon-separated block,\n -- so that they're all processed together.\n\n -- Some databases do not support this. Those adapters will need to override this macro\n -- to run each statement individually.\n #}\n {% call statement('grants') %}\n {% for dcl_statement in dcl_statement_list %}\n {{ dcl_statement }};\n {% endfor %}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.690955, "supported_languages": null}, "macro.dbt.apply_grants": {"name": "apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.apply_grants", "macro_sql": "{% macro apply_grants(relation, grant_config, should_revoke) %}\n {{ return(adapter.dispatch(\"apply_grants\", \"dbt\")(relation, grant_config, should_revoke)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.691225, "supported_languages": null}, "macro.dbt.default__apply_grants": {"name": "default__apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__apply_grants", "macro_sql": "{% macro default__apply_grants(relation, grant_config, should_revoke=True) %}\n {#-- If grant_config is {} or None, this is a no-op --#}\n {% if grant_config %}\n {% if should_revoke %}\n {#-- We think previous grants may have carried over --#}\n {#-- Show current grants and calculate diffs --#}\n {% set current_grants_table = run_query(get_show_grant_sql(relation)) %}\n {% set current_grants_dict = adapter.standardize_grants_dict(current_grants_table) %}\n {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %}\n {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %}\n {% if not (needs_granting or needs_revoking) %}\n {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}}\n 
{% endif %}\n {% else %}\n {#-- We don't think there's any chance of previous grants having carried over. --#}\n {#-- Jump straight to granting what the user has configured. --#}\n {% set needs_revoking = {} %}\n {% set needs_granting = grant_config %}\n {% endif %}\n {% if needs_granting or needs_revoking %}\n {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %}\n {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %}\n {% set dcl_statement_list = revoke_statement_list + grant_statement_list %}\n {% if dcl_statement_list %}\n {{ call_dcl_statements(dcl_statement_list) }}\n {% endif %}\n {% endif %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_grant_sql", "macro.dbt.get_dcl_statement_list", "macro.dbt.call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.692201, "supported_languages": null}, "macro.dbt.alter_column_comment": {"name": "alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.692799, "supported_languages": null}, "macro.dbt.default__alter_column_comment": {"name": "default__alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.692943, "supported_languages": null}, "macro.dbt.alter_relation_comment": {"name": "alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6931179, "supported_languages": null}, "macro.dbt.default__alter_relation_comment": {"name": "default__alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ 
exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.69326, "supported_languages": null}, "macro.dbt.persist_docs": {"name": "persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.693491, "supported_languages": null}, "macro.dbt.default__persist_docs": {"name": "default__persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.693917, "supported_languages": null}, "macro.dbt.get_catalog": {"name": "get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.695193, "supported_languages": null}, "macro.dbt.default__get_catalog": {"name": "default__get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6954129, "supported_languages": null}, "macro.dbt.information_schema_name": {"name": "information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.information_schema_name", "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.695571, "supported_languages": null}, "macro.dbt.default__information_schema_name": {"name": "default__information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.695704, "supported_languages": null}, "macro.dbt.list_schemas": {"name": "list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6958551, "supported_languages": null}, "macro.dbt.default__list_schemas": {"name": "default__list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.696067, "supported_languages": null}, "macro.dbt.check_schema_exists": {"name": "check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6962419, "supported_languages": null}, "macro.dbt.default__check_schema_exists": {"name": "default__check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": 
"macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.696498, "supported_languages": null}, "macro.dbt.list_relations_without_caching": {"name": "list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.696657, "supported_languages": null}, "macro.dbt.default__list_relations_without_caching": {"name": "default__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6967921, "supported_languages": null}, "macro.dbt.get_columns_in_relation": {"name": "get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.698245, "supported_languages": null}, "macro.dbt.default__get_columns_in_relation": {"name": "default__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.698378, "supported_languages": null}, 
"macro.dbt.sql_convert_columns_in_relation": {"name": "sql_convert_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.69872, "supported_languages": null}, "macro.dbt.get_columns_in_query": {"name": "get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.698879, "supported_languages": null}, "macro.dbt.default__get_columns_in_query": {"name": "default__get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n {% endcall %}\n\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.699171, "supported_languages": null}, "macro.dbt.alter_column_type": {"name": "alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.699368, "supported_languages": null}, "macro.dbt.default__alter_column_type": {"name": "default__alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. 
Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.6998851, "supported_languages": null}, "macro.dbt.alter_relation_add_remove_columns": {"name": "alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.700109, "supported_languages": null}, "macro.dbt.default__alter_relation_add_remove_columns": {"name": "default__alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n\n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.700779, "supported_languages": null}, "macro.dbt.resolve_model_name": {"name": "resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.resolve_model_name", "macro_sql": "{% macro resolve_model_name(input_model_name) %}\n {{ return(adapter.dispatch('resolve_model_name', 'dbt')(input_model_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.701974, "supported_languages": null}, "macro.dbt.default__resolve_model_name": {"name": 
"default__resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.default__resolve_model_name", "macro_sql": "\n\n{%- macro default__resolve_model_name(input_model_name) -%}\n {{ input_model_name | string | replace('\"', '\\\"') }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.702105, "supported_languages": null}, "macro.dbt.build_ref_function": {"name": "build_ref_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_ref_function", "macro_sql": "{% macro build_ref_function(model) %}\n\n {%- set ref_dict = {} -%}\n {%- for _ref in model.refs -%}\n {%- set resolved = ref(*_ref) -%}\n {%- do ref_dict.update({_ref | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef ref(*args,dbt_load_df_function):\n refs = {{ ref_dict | tojson }}\n key = '.'.join(args)\n return dbt_load_df_function(refs[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.702455, "supported_languages": null}, "macro.dbt.build_source_function": {"name": "build_source_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_source_function", "macro_sql": "{% macro build_source_function(model) %}\n\n {%- set source_dict = {} -%}\n {%- for _source in model.sources -%}\n {%- set resolved = source(*_source) -%}\n {%- do source_dict.update({_source | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef source(*args, dbt_load_df_function):\n sources = {{ source_dict | tojson }}\n key = '.'.join(args)\n return dbt_load_df_function(sources[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.702801, "supported_languages": null}, "macro.dbt.build_config_dict": {"name": "build_config_dict", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_config_dict", "macro_sql": "{% macro build_config_dict(model) %}\n {%- set config_dict = {} -%}\n {% set config_dbt_used = zip(model.config.config_keys_used, model.config.config_keys_defaults) | list %}\n {%- for key, default in config_dbt_used -%}\n {# weird type testing with enum, would be much easier to write this logic in Python! 
#}\n {%- if key == \"language\" -%}\n {%- set value = \"python\" -%}\n {%- endif -%}\n {%- set value = model.config.get(key, default) -%}\n {%- do config_dict.update({key: value}) -%}\n {%- endfor -%}\nconfig_dict = {{ config_dict }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.7033, "supported_languages": null}, "macro.dbt.py_script_postfix": {"name": "py_script_postfix", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_postfix", "macro_sql": "{% macro py_script_postfix(model) %}\n# This part is user provided model code\n# you will need to copy the next section to run the code\n# COMMAND ----------\n# this part is dbt logic for get ref work, do not modify\n\n{{ build_ref_function(model ) }}\n{{ build_source_function(model ) }}\n{{ build_config_dict(model) }}\n\nclass config:\n def __init__(self, *args, **kwargs):\n pass\n\n @staticmethod\n def get(key, default=None):\n return config_dict.get(key, default)\n\nclass this:\n \"\"\"dbt.this() or dbt.this.identifier\"\"\"\n database = \"{{ this.database }}\"\n schema = \"{{ this.schema }}\"\n identifier = \"{{ this.identifier }}\"\n {% set this_relation_name = resolve_model_name(this) %}\n def __repr__(self):\n return '{{ this_relation_name }}'\n\n\nclass dbtObj:\n def __init__(self, load_df_function) -> None:\n self.source = lambda *args: source(*args, dbt_load_df_function=load_df_function)\n self.ref = lambda *args: ref(*args, dbt_load_df_function=load_df_function)\n self.config = config\n self.this = this()\n self.is_incremental = {{ is_incremental() }}\n\n# COMMAND ----------\n{{py_script_comment()}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.build_ref_function", "macro.dbt.build_source_function", "macro.dbt.build_config_dict", "macro.dbt.resolve_model_name", "macro.dbt.is_incremental", "macro.dbt.py_script_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.703707, "supported_languages": null}, "macro.dbt.py_script_comment": {"name": "py_script_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_comment", "macro_sql": "{%macro py_script_comment()%}\n{%endmacro%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.7037702, "supported_languages": null}, "macro.dbt.test_unique": {"name": "test_unique", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.704208, "supported_languages": null}, "macro.dbt.test_not_null": {"name": "test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", 
"original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.704405, "supported_languages": null}, "macro.dbt.test_accepted_values": {"name": "test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.704649, "supported_languages": null}, "macro.dbt.test_relationships": {"name": "test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1680025829.704882, "supported_languages": null}}, "docs": {"doc.jaffle_shop.__overview__": {"name": "__overview__", "resource_type": "doc", "package_name": "jaffle_shop", "path": "overview.md", "original_file_path": "models/overview.md", "unique_id": "doc.jaffle_shop.__overview__", "block_contents": "## Data Documentation for Jaffle Shop\n\n`jaffle_shop` is a fictional ecommerce store.\n\nThis [dbt](https://www.getdbt.com/) project is for testing out code.\n\nThe source code can be found [here](https://github.com/clrcrl/jaffle_shop)."}, "doc.jaffle_shop.orders_status": {"name": "orders_status", "resource_type": "doc", "package_name": "jaffle_shop", "path": "docs.md", "original_file_path": "models/docs.md", "unique_id": "doc.jaffle_shop.orders_status", "block_contents": "Orders can be one of the following statuses:\n\n| status | description |\n|----------------|------------------------------------------------------------------------------------------------------------------------|\n| placed | The order has been placed but has not yet left the warehouse |\n| shipped | The order has ben shipped to the customer and is currently in transit |\n| completed | The order has been received by the customer |\n| return_pending | The customer has indicated that they would like to return the order, but it has not yet been received at the warehouse |\n| returned | The order has been returned by the customer and received at the warehouse |"}, "doc.dbt.__overview__": {"name": "__overview__", "resource_type": "doc", "package_name": "dbt", "path": "overview.md", "original_file_path": "docs/overview.md", "unique_id": "doc.dbt.__overview__", "block_contents": "### Welcome!\n\nWelcome to the 
auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {}, "metrics": {}, "selectors": {}, "disabled": {}, "parent_map": {"model.jaffle_shop.customers": ["model.jaffle_shop.stg_customers", "model.jaffle_shop.stg_orders", "model.jaffle_shop.stg_payments"], "model.jaffle_shop.orders": ["model.jaffle_shop.stg_orders", "model.jaffle_shop.stg_payments"], "model.jaffle_shop.stg_customers": ["seed.jaffle_shop.raw_customers"], "model.jaffle_shop.stg_payments": ["seed.jaffle_shop.raw_payments"], "model.jaffle_shop.stg_orders": ["seed.jaffle_shop.raw_orders"], "seed.jaffle_shop.raw_customers": [], "seed.jaffle_shop.raw_orders": [], "seed.jaffle_shop.raw_payments": [], "test.jaffle_shop.unique_customers_customer_id.c5af1ff4b1": ["model.jaffle_shop.customers"], "test.jaffle_shop.not_null_customers_customer_id.5c9bf9911d": ["model.jaffle_shop.customers"], "test.jaffle_shop.unique_orders_order_id.fed79b3a6e": ["model.jaffle_shop.orders"], "test.jaffle_shop.not_null_orders_order_id.cf6c17daed": ["model.jaffle_shop.orders"], "test.jaffle_shop.not_null_orders_customer_id.c5f02694af": ["model.jaffle_shop.orders"], "test.jaffle_shop.relationships_orders_customer_id__customer_id__ref_customers_.c6ec7f58f2": ["model.jaffle_shop.customers", "model.jaffle_shop.orders"], "test.jaffle_shop.accepted_values_orders_status__placed__shipped__completed__return_pending__returned.be6b5b5ec3": ["model.jaffle_shop.orders"], "test.jaffle_shop.not_null_orders_amount.106140f9fd": ["model.jaffle_shop.orders"], "test.jaffle_shop.not_null_orders_credit_card_amount.d3ca593b59": ["model.jaffle_shop.orders"], "test.jaffle_shop.not_null_orders_coupon_amount.ab90c90625": ["model.jaffle_shop.orders"], "test.jaffle_shop.not_null_orders_bank_transfer_amount.7743500c49": ["model.jaffle_shop.orders"], 
"test.jaffle_shop.not_null_orders_gift_card_amount.413a0d2d7a": ["model.jaffle_shop.orders"], "test.jaffle_shop.unique_stg_customers_customer_id.c7614daada": ["model.jaffle_shop.stg_customers"], "test.jaffle_shop.not_null_stg_customers_customer_id.e2cfb1f9aa": ["model.jaffle_shop.stg_customers"], "test.jaffle_shop.unique_stg_orders_order_id.e3b841c71a": ["model.jaffle_shop.stg_orders"], "test.jaffle_shop.not_null_stg_orders_order_id.81cfe2fe64": ["model.jaffle_shop.stg_orders"], "test.jaffle_shop.accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned.080fb20aad": ["model.jaffle_shop.stg_orders"], "test.jaffle_shop.unique_stg_payments_payment_id.3744510712": ["model.jaffle_shop.stg_payments"], "test.jaffle_shop.not_null_stg_payments_payment_id.c19cc50075": ["model.jaffle_shop.stg_payments"], "test.jaffle_shop.accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card.3c3820f278": ["model.jaffle_shop.stg_payments"]}, "child_map": {"model.jaffle_shop.customers": ["test.jaffle_shop.not_null_customers_customer_id.5c9bf9911d", "test.jaffle_shop.relationships_orders_customer_id__customer_id__ref_customers_.c6ec7f58f2", "test.jaffle_shop.unique_customers_customer_id.c5af1ff4b1"], "model.jaffle_shop.orders": ["test.jaffle_shop.accepted_values_orders_status__placed__shipped__completed__return_pending__returned.be6b5b5ec3", "test.jaffle_shop.not_null_orders_amount.106140f9fd", "test.jaffle_shop.not_null_orders_bank_transfer_amount.7743500c49", "test.jaffle_shop.not_null_orders_coupon_amount.ab90c90625", "test.jaffle_shop.not_null_orders_credit_card_amount.d3ca593b59", "test.jaffle_shop.not_null_orders_customer_id.c5f02694af", "test.jaffle_shop.not_null_orders_gift_card_amount.413a0d2d7a", "test.jaffle_shop.not_null_orders_order_id.cf6c17daed", "test.jaffle_shop.relationships_orders_customer_id__customer_id__ref_customers_.c6ec7f58f2", "test.jaffle_shop.unique_orders_order_id.fed79b3a6e"], "model.jaffle_shop.stg_customers": ["model.jaffle_shop.customers", "test.jaffle_shop.not_null_stg_customers_customer_id.e2cfb1f9aa", "test.jaffle_shop.unique_stg_customers_customer_id.c7614daada"], "model.jaffle_shop.stg_payments": ["model.jaffle_shop.customers", "model.jaffle_shop.orders", "test.jaffle_shop.accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card.3c3820f278", "test.jaffle_shop.not_null_stg_payments_payment_id.c19cc50075", "test.jaffle_shop.unique_stg_payments_payment_id.3744510712"], "model.jaffle_shop.stg_orders": ["model.jaffle_shop.customers", "model.jaffle_shop.orders", "test.jaffle_shop.accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned.080fb20aad", "test.jaffle_shop.not_null_stg_orders_order_id.81cfe2fe64", "test.jaffle_shop.unique_stg_orders_order_id.e3b841c71a"], "seed.jaffle_shop.raw_customers": ["model.jaffle_shop.stg_customers"], "seed.jaffle_shop.raw_orders": ["model.jaffle_shop.stg_orders"], "seed.jaffle_shop.raw_payments": ["model.jaffle_shop.stg_payments"], "test.jaffle_shop.unique_customers_customer_id.c5af1ff4b1": [], "test.jaffle_shop.not_null_customers_customer_id.5c9bf9911d": [], "test.jaffle_shop.unique_orders_order_id.fed79b3a6e": [], "test.jaffle_shop.not_null_orders_order_id.cf6c17daed": [], "test.jaffle_shop.not_null_orders_customer_id.c5f02694af": [], "test.jaffle_shop.relationships_orders_customer_id__customer_id__ref_customers_.c6ec7f58f2": [], 
"test.jaffle_shop.accepted_values_orders_status__placed__shipped__completed__return_pending__returned.be6b5b5ec3": [], "test.jaffle_shop.not_null_orders_amount.106140f9fd": [], "test.jaffle_shop.not_null_orders_credit_card_amount.d3ca593b59": [], "test.jaffle_shop.not_null_orders_coupon_amount.ab90c90625": [], "test.jaffle_shop.not_null_orders_bank_transfer_amount.7743500c49": [], "test.jaffle_shop.not_null_orders_gift_card_amount.413a0d2d7a": [], "test.jaffle_shop.unique_stg_customers_customer_id.c7614daada": [], "test.jaffle_shop.not_null_stg_customers_customer_id.e2cfb1f9aa": [], "test.jaffle_shop.unique_stg_orders_order_id.e3b841c71a": [], "test.jaffle_shop.not_null_stg_orders_order_id.81cfe2fe64": [], "test.jaffle_shop.accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned.080fb20aad": [], "test.jaffle_shop.unique_stg_payments_payment_id.3744510712": [], "test.jaffle_shop.not_null_stg_payments_payment_id.c19cc50075": [], "test.jaffle_shop.accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card.3c3820f278": []}} \ No newline at end of file +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v11.json", "dbt_version": "1.7.3", "generated_at": "2024-01-31T18:05:25.193867Z", "invocation_id": "baf0db27-f48e-40a3-aa05-1b05b809ec5a", "env": {}, "project_name": "datafold_demo", "project_id": "e0814ec85ee7b1694571de4cad476003", "user_id": "f7f86073-1fa9-414c-9913-22c3648c2e32", "send_anonymous_usage_stats": true, "adapter_type": "duckdb"}, "nodes": {"model.datafold_demo.stg_customers": {"database": "jaffle_shop", "schema": "dev", "name": "stg_customers", "resource_type": "model", "package_name": "datafold_demo", "path": "staging/stg_customers.sql", "original_file_path": "models/staging/stg_customers.sql", "unique_id": "model.datafold_demo.stg_customers", "fqn": ["datafold_demo", "staging", "stg_customers"], "alias": "stg_customers", "checksum": {"name": "sha256", "checksum": "80e3223cd54387e11fa16cd0f4cbe15f8ff74dcd9900b93856b9e39416178c9d"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {"customer_id": {"name": "customer_id", "description": "", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "datafold_demo://models/staging/schema.yml", "build_path": "target/run/datafold_demo/models/staging/stg_customers.sql", "deferred": false, "unrendered_config": {"materialized": "view"}, "created_at": 1706724325.78564, "relation_name": "\"jaffle_shop\".\"dev\".\"stg_customers\"", "raw_code": "with source as (\n\n {#-\n Normally we would select from the table here, but we are using seeds to load\n our data in this project\n #}\n select * from {{ ref('raw_customers') }}\n\n),\n\nrenamed as (\n\n select\n id as customer_id,\n first_name,\n last_name\n\n from source\n\n)\n\nselect * from renamed", "language": "sql", "refs": [{"name": "raw_customers", "package": null, "version": null}], 
"sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["seed.datafold_demo.raw_customers"]}, "compiled_path": "target/compiled/datafold_demo/models/staging/stg_customers.sql", "compiled": true, "compiled_code": "with source as (\n select * from \"jaffle_shop\".\"dev\".\"raw_customers\"\n\n),\n\nrenamed as (\n\n select\n id as customer_id,\n first_name,\n last_name\n\n from source\n\n)\n\nselect * from renamed", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.datafold_demo.stg_payments": {"database": "jaffle_shop", "schema": "dev", "name": "stg_payments", "resource_type": "model", "package_name": "datafold_demo", "path": "staging/stg_payments.sql", "original_file_path": "models/staging/stg_payments.sql", "unique_id": "model.datafold_demo.stg_payments", "fqn": ["datafold_demo", "staging", "stg_payments"], "alias": "stg_payments", "checksum": {"name": "sha256", "checksum": "9c1ee3bfb10e07c2dfc325d55925da0e521887136d9051768cb8acf09dc86bda"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {"payment_id": {"name": "payment_id", "description": "", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "payment_method": {"name": "payment_method", "description": "", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "datafold_demo://models/staging/schema.yml", "build_path": "target/run/datafold_demo/models/staging/stg_payments.sql", "deferred": false, "unrendered_config": {"materialized": "view"}, "created_at": 1706724325.7870321, "relation_name": "\"jaffle_shop\".\"dev\".\"stg_payments\"", "raw_code": "with source as (\n \n {#-\n Normally we would select from the table here, but we are using seeds to load\n our data in this project\n #}\n select * from {{ ref('raw_payments') }}\n\n),\n\nrenamed as (\n\n select\n id as payment_id,\n order_id,\n payment_method,\n\n -- `amount` is currently stored in cents, so we convert it to dollars\n amount / 100 as amount\n\n from source\n\n)\n\nselect * from renamed", "language": "sql", "refs": [{"name": "raw_payments", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["seed.datafold_demo.raw_payments"]}, "compiled_path": "target/compiled/datafold_demo/models/staging/stg_payments.sql", "compiled": true, "compiled_code": "with source as (\n select * from \"jaffle_shop\".\"dev\".\"raw_payments\"\n\n),\n\nrenamed as (\n\n select\n id as payment_id,\n order_id,\n payment_method,\n\n -- `amount` is currently stored in cents, so we convert it to dollars\n amount / 100 as amount\n\n from source\n\n)\n\nselect * from renamed", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": 
[], "version": null, "latest_version": null, "deprecation_date": null}, "model.datafold_demo.stg_orders": {"database": "jaffle_shop", "schema": "dev", "name": "stg_orders", "resource_type": "model", "package_name": "datafold_demo", "path": "staging/stg_orders.sql", "original_file_path": "models/staging/stg_orders.sql", "unique_id": "model.datafold_demo.stg_orders", "fqn": ["datafold_demo", "staging", "stg_orders"], "alias": "stg_orders", "checksum": {"name": "sha256", "checksum": "f4f881cb09d2c4162200fc331d7401df6d1abd4fed492554a7db70dede347108"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {"order_id": {"name": "order_id", "description": "", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "datafold_demo://models/staging/schema.yml", "build_path": "target/run/datafold_demo/models/staging/stg_orders.sql", "deferred": false, "unrendered_config": {"materialized": "view"}, "created_at": 1706724325.786328, "relation_name": "\"jaffle_shop\".\"dev\".\"stg_orders\"", "raw_code": "with source as (\n\n {#-\n Normally we would select from the table here, but we are using seeds to load\n our data in this project\n #}\n select * from {{ ref('raw_orders') }}\n\n),\n\nrenamed as (\n\n select\n id as order_id,\n user_id as customer_id,\n order_date,\n status\n\n from source\n\n)\n\nselect * from renamed", "language": "sql", "refs": [{"name": "raw_orders", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["seed.datafold_demo.raw_orders"]}, "compiled_path": "target/compiled/datafold_demo/models/staging/stg_orders.sql", "compiled": true, "compiled_code": "with source as (\n select * from \"jaffle_shop\".\"dev\".\"raw_orders\"\n\n),\n\nrenamed as (\n\n select\n id as order_id,\n user_id as customer_id,\n order_date,\n status\n\n from source\n\n)\n\nselect * from renamed", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.datafold_demo.customers": {"database": "jaffle_shop", "schema": "dev", "name": "customers", "resource_type": "model", "package_name": "datafold_demo", "path": "core/customers.sql", "original_file_path": "models/core/customers.sql", "unique_id": "model.datafold_demo.customers", "fqn": ["datafold_demo", "core", "customers"], "alias": "customers", "checksum": {"name": "sha256", "checksum": "60bd72e33da43fff3a7e7609135c17cd4468bd22afec0735dd36018bfb5af30a"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, 
"full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "This table has basic information about a customer, as well as some derived facts based on a customer's orders", "columns": {"customer_id": {"name": "customer_id", "description": "This is a unique identifier for a customer", "meta": {"primary-key": true}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_name": {"name": "first_name", "description": "Customer's first name. PII.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_name": {"name": "last_name", "description": "Customer's last name. PII.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_order": {"name": "first_order", "description": "Date (UTC) of a customer's first order", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "most_recent_order": {"name": "most_recent_order", "description": "Date (UTC) of a customer's most recent order", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "number_of_orders": {"name": "number_of_orders", "description": "Count of the number of orders a customer has placed", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "total_order_amount": {"name": "total_order_amount", "description": "Total value (AUD) of a customer's orders", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "datafold_demo://models/core/schema.yml", "build_path": "target/run/datafold_demo/models/core/customers.sql", "deferred": false, "unrendered_config": {"materialized": "table"}, "created_at": 1706724325.84845, "relation_name": "\"jaffle_shop\".\"dev\".\"customers\"", "raw_code": "with customers as (\n\n select * from {{ ref('stg_customers') }}\n\n),\n\norders as (\n\n select * from {{ ref('stg_orders') }}\n\n),\n\npayments as (\n\n select * from {{ ref('stg_payments') }}\n\n),\n\ncustomer_orders as (\n\n select\n customer_id,\n\n min(order_date) as first_order,\n max(order_date) as most_recent_order,\n count(order_id) as number_of_orders\n from orders\n\n group by customer_id\n\n),\n\ncustomer_payments as (\n\n select\n orders.customer_id,\n sum(amount) as total_amount\n\n from payments\n\n left join orders on\n payments.order_id = orders.order_id\n\n group by orders.customer_id\n\n),\n\nfinal as (\n\n select\n customers.customer_id,\n customers.first_name,\n customers.last_name,\n customer_orders.first_order,\n customer_orders.most_recent_order,\n customer_orders.number_of_orders,\n customer_payments.total_amount as customer_lifetime_value\n\n from customers\n\n left join customer_orders\n on customers.customer_id = customer_orders.customer_id\n\n left join customer_payments\n on customers.customer_id = customer_payments.customer_id\n\n)\n\nselect * from final", "language": "sql", "refs": [{"name": "stg_customers", "package": null, "version": null}, {"name": "stg_orders", "package": null, "version": null}, {"name": "stg_payments", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.datafold_demo.stg_customers", "model.datafold_demo.stg_orders", "model.datafold_demo.stg_payments"]}, "compiled_path": 
"target/compiled/datafold_demo/models/core/customers.sql", "compiled": true, "compiled_code": "with customers as (\n\n select * from \"jaffle_shop\".\"dev\".\"stg_customers\"\n\n),\n\norders as (\n\n select * from \"jaffle_shop\".\"dev\".\"stg_orders\"\n\n),\n\npayments as (\n\n select * from \"jaffle_shop\".\"dev\".\"stg_payments\"\n\n),\n\ncustomer_orders as (\n\n select\n customer_id,\n\n min(order_date) as first_order,\n max(order_date) as most_recent_order,\n count(order_id) as number_of_orders\n from orders\n\n group by customer_id\n\n),\n\ncustomer_payments as (\n\n select\n orders.customer_id,\n sum(amount) as total_amount\n\n from payments\n\n left join orders on\n payments.order_id = orders.order_id\n\n group by orders.customer_id\n\n),\n\nfinal as (\n\n select\n customers.customer_id,\n customers.first_name,\n customers.last_name,\n customer_orders.first_order,\n customer_orders.most_recent_order,\n customer_orders.number_of_orders,\n customer_payments.total_amount as customer_lifetime_value\n\n from customers\n\n left join customer_orders\n on customers.customer_id = customer_orders.customer_id\n\n left join customer_payments\n on customers.customer_id = customer_payments.customer_id\n\n)\n\nselect * from final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.datafold_demo.orders": {"database": "jaffle_shop", "schema": "dev", "name": "orders", "resource_type": "model", "package_name": "datafold_demo", "path": "core/orders.sql", "original_file_path": "models/core/orders.sql", "unique_id": "model.datafold_demo.orders", "fqn": ["datafold_demo", "core", "orders"], "alias": "orders", "checksum": {"name": "sha256", "checksum": "a81ee7b57492458b7c79e2dd9cc171b62be82d197123cb56c60bcea74d9e16a9"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {"datafold": {"datadiff": {"filter": "amount >= 0", "include_columns": ["order_id", "customer_id", "order_date", "amount", "credit_card_amount", "coupon_amount", "bank_transfer_amount", "gift_card_amount"], "exclude_columns": ["new_column"]}}}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "This table has basic information about orders, as well as some derived facts based on payments", "columns": {"order_id": {"name": "order_id", "description": "This is a unique identifier for an order", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "customer_id": {"name": "customer_id", "description": "Foreign key to the customers table", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "order_date": {"name": "order_date", "description": "Date (UTC) that the order was placed", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "Orders can be one of the following statuses:\n\n| status | description 
|\n|----------------|------------------------------------------------------------------------------------------------------------------------|\n| placed | The order has been placed but has not yet left the warehouse |\n| shipped | The order has ben shipped to the customer and is currently in transit |\n| completed | The order has been received by the customer |\n| return_pending | The customer has indicated that they would like to return the order, but it has not yet been received at the warehouse |\n| returned | The order has been returned by the customer and received at the warehouse |", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "amount": {"name": "amount", "description": "Total amount (AUD) of the order", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "credit_card_amount": {"name": "credit_card_amount", "description": "Amount of the order (AUD) paid for by credit card", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "coupon_amount": {"name": "coupon_amount", "description": "Amount of the order (AUD) paid for by coupon", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "bank_transfer_amount": {"name": "bank_transfer_amount", "description": "Amount of the order (AUD) paid for by bank transfer", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "gift_card_amount": {"name": "gift_card_amount", "description": "Amount of the order (AUD) paid for by gift card", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {"datafold": {"datadiff": {"filter": "amount >= 0", "include_columns": ["order_id", "customer_id", "order_date", "amount", "credit_card_amount", "coupon_amount", "bank_transfer_amount", "gift_card_amount"], "exclude_columns": ["new_column"]}}}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "datafold_demo://models/core/schema.yml", "build_path": "target/run/datafold_demo/models/core/orders.sql", "deferred": false, "unrendered_config": {"materialized": "table", "meta": {"datafold": {"datadiff": {"filter": "amount >= 0", "include_columns": ["order_id", "customer_id", "order_date", "amount", "credit_card_amount", "coupon_amount", "bank_transfer_amount", "gift_card_amount"], "exclude_columns": ["new_column"]}}}}, "created_at": 1706724325.851211, "relation_name": "\"jaffle_shop\".\"dev\".\"orders\"", "raw_code": "{% set payment_methods = ['credit_card', 'coupon', 'bank_transfer', 'gift_card'] %}\n\nwith orders as (\n\n select * from {{ ref('stg_orders') }}\n\n),\n\npayments as (\n\n select * from {{ ref('stg_payments') }}\n\n),\n\norder_payments as (\n\n select\n order_id,\n\n {% for payment_method in payment_methods -%}\n sum(case when payment_method = '{{ payment_method }}' then amount else 0 end) as {{ payment_method }}_amount,\n {% endfor -%}\n\n sum(amount) as total_amount\n\n from payments\n\n group by order_id\n\n),\n\nfinal as (\n\n select\n orders.order_id+1 as order_id,\n orders.customer_id,\n cast(orders.order_date as varchar) as order_date,\n {# orders.status, #}\n 1 as new_column,\n\n {% for payment_method in payment_methods -%}\n\n order_payments.{{ payment_method }}_amount,\n\n {% endfor -%}\n\n order_payments.total_amount as amount\n\n from orders\n\n\n left join order_payments\n on orders.order_id = order_payments.order_id\n\n),\n\nfinal2 as (\n\n select\n orders.order_id+2 as order_id,\n orders.customer_id,\n cast(orders.order_date as varchar) as order_date,\n 
{# orders.status, #}\n 1 as new_column,\n\n {% for payment_method in payment_methods -%}\n\n order_payments.{{ payment_method }}_amount,\n\n {% endfor -%}\n\n order_payments.total_amount as amount\n\n from orders\n\n\n left join order_payments\n on orders.order_id = order_payments.order_id\n\n)\n\n(select * from final limit 10)\nunion all\n(select * from final2 where order_id not in (select order_id from final) limit 10)", "language": "sql", "refs": [{"name": "stg_orders", "package": null, "version": null}, {"name": "stg_payments", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.datafold_demo.stg_orders", "model.datafold_demo.stg_payments"]}, "compiled_path": "target/compiled/datafold_demo/models/core/orders.sql", "compiled": true, "compiled_code": "\n\nwith orders as (\n\n select * from \"jaffle_shop\".\"dev\".\"stg_orders\"\n\n),\n\npayments as (\n\n select * from \"jaffle_shop\".\"dev\".\"stg_payments\"\n\n),\n\norder_payments as (\n\n select\n order_id,\n\n sum(case when payment_method = 'credit_card' then amount else 0 end) as credit_card_amount,\n sum(case when payment_method = 'coupon' then amount else 0 end) as coupon_amount,\n sum(case when payment_method = 'bank_transfer' then amount else 0 end) as bank_transfer_amount,\n sum(case when payment_method = 'gift_card' then amount else 0 end) as gift_card_amount,\n sum(amount) as total_amount\n\n from payments\n\n group by order_id\n\n),\n\nfinal as (\n\n select\n orders.order_id+1 as order_id,\n orders.customer_id,\n cast(orders.order_date as varchar) as order_date,\n \n 1 as new_column,\n\n order_payments.credit_card_amount,\n\n order_payments.coupon_amount,\n\n order_payments.bank_transfer_amount,\n\n order_payments.gift_card_amount,\n\n order_payments.total_amount as amount\n\n from orders\n\n\n left join order_payments\n on orders.order_id = order_payments.order_id\n\n),\n\nfinal2 as (\n\n select\n orders.order_id+2 as order_id,\n orders.customer_id,\n cast(orders.order_date as varchar) as order_date,\n \n 1 as new_column,\n\n order_payments.credit_card_amount,\n\n order_payments.coupon_amount,\n\n order_payments.bank_transfer_amount,\n\n order_payments.gift_card_amount,\n\n order_payments.total_amount as amount\n\n from orders\n\n\n left join order_payments\n on orders.order_id = order_payments.order_id\n\n)\n\n(select * from final limit 10)\nunion all\n(select * from final2 where order_id not in (select order_id from final) limit 10)", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "seed.datafold_demo.raw_customers": {"database": "jaffle_shop", "schema": "dev", "name": "raw_customers", "resource_type": "seed", "package_name": "datafold_demo", "path": "raw_customers.csv", "original_file_path": "seeds/raw_customers.csv", "unique_id": "seed.datafold_demo.raw_customers", "fqn": ["datafold_demo", "raw_customers"], "alias": "raw_customers", "checksum": {"name": "sha256", "checksum": "357d173dda65a741ad97d6683502286cc2655bb396ab5f4dfad12b8c39bd2a63"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", 
"on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": null, "fast": true}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"fast": true}, "created_at": 1706724325.746119, "relation_name": "\"jaffle_shop\".\"dev\".\"raw_customers\"", "raw_code": "", "root_path": "/Users/sung/Desktop/data-diff/data_diff_demo", "depends_on": {"macros": []}}, "seed.datafold_demo.raw_orders": {"database": "jaffle_shop", "schema": "dev", "name": "raw_orders", "resource_type": "seed", "package_name": "datafold_demo", "path": "raw_orders.csv", "original_file_path": "seeds/raw_orders.csv", "unique_id": "seed.datafold_demo.raw_orders", "fqn": ["datafold_demo", "raw_orders"], "alias": "raw_orders", "checksum": {"name": "sha256", "checksum": "10ef49f6f03495b970b186248cc1123bc3ecf931f4e8bcafe65730089584d2a0"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": null, "fast": true}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"fast": true}, "created_at": 1706724325.747665, "relation_name": "\"jaffle_shop\".\"dev\".\"raw_orders\"", "raw_code": "", "root_path": "/Users/sung/Desktop/data-diff/data_diff_demo", "depends_on": {"macros": []}}, "seed.datafold_demo.raw_payments": {"database": "jaffle_shop", "schema": "dev", "name": "raw_payments", "resource_type": "seed", "package_name": "datafold_demo", "path": "raw_payments.csv", "original_file_path": "seeds/raw_payments.csv", "unique_id": "seed.datafold_demo.raw_payments", "fqn": ["datafold_demo", "raw_payments"], "alias": "raw_payments", "checksum": {"name": "sha256", "checksum": "6de0626a8db9c1750eefd1b2e17fac4c2a4b9f778eb50532d8b377b90de395e6"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": null, "fast": true}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"fast": true}, "created_at": 1706724325.749057, "relation_name": "\"jaffle_shop\".\"dev\".\"raw_payments\"", "raw_code": "", "root_path": "/Users/sung/Desktop/data-diff/data_diff_demo", "depends_on": {"macros": []}}, "test.datafold_demo.unique_stg_customers_customer_id.c7614daada": 
{"test_metadata": {"name": "unique", "kwargs": {"column_name": "customer_id", "model": "{{ get_where_subquery(ref('stg_customers')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "unique_stg_customers_customer_id", "resource_type": "test", "package_name": "datafold_demo", "path": "unique_stg_customers_customer_id.sql", "original_file_path": "models/staging/schema.yml", "unique_id": "test.datafold_demo.unique_stg_customers_customer_id.c7614daada", "fqn": ["datafold_demo", "staging", "unique_stg_customers_customer_id"], "alias": "unique_stg_customers_customer_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1706724325.828681, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_customers", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique"], "nodes": ["model.datafold_demo.stg_customers"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "customer_id", "file_key_name": "models.stg_customers", "attached_node": "model.datafold_demo.stg_customers"}, "test.datafold_demo.not_null_stg_customers_customer_id.e2cfb1f9aa": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "customer_id", "model": "{{ get_where_subquery(ref('stg_customers')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "not_null_stg_customers_customer_id", "resource_type": "test", "package_name": "datafold_demo", "path": "not_null_stg_customers_customer_id.sql", "original_file_path": "models/staging/schema.yml", "unique_id": "test.datafold_demo.not_null_stg_customers_customer_id.e2cfb1f9aa", "fqn": ["datafold_demo", "staging", "not_null_stg_customers_customer_id"], "alias": "not_null_stg_customers_customer_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1706724325.8301098, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_customers", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.datafold_demo.stg_customers"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "customer_id", "file_key_name": "models.stg_customers", "attached_node": 
"model.datafold_demo.stg_customers"}, "test.datafold_demo.unique_stg_orders_order_id.e3b841c71a": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "order_id", "model": "{{ get_where_subquery(ref('stg_orders')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "unique_stg_orders_order_id", "resource_type": "test", "package_name": "datafold_demo", "path": "unique_stg_orders_order_id.sql", "original_file_path": "models/staging/schema.yml", "unique_id": "test.datafold_demo.unique_stg_orders_order_id.e3b841c71a", "fqn": ["datafold_demo", "staging", "unique_stg_orders_order_id"], "alias": "unique_stg_orders_order_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1706724325.83131, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_orders", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique"], "nodes": ["model.datafold_demo.stg_orders"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "order_id", "file_key_name": "models.stg_orders", "attached_node": "model.datafold_demo.stg_orders"}, "test.datafold_demo.not_null_stg_orders_order_id.81cfe2fe64": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "order_id", "model": "{{ get_where_subquery(ref('stg_orders')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "not_null_stg_orders_order_id", "resource_type": "test", "package_name": "datafold_demo", "path": "not_null_stg_orders_order_id.sql", "original_file_path": "models/staging/schema.yml", "unique_id": "test.datafold_demo.not_null_stg_orders_order_id.81cfe2fe64", "fqn": ["datafold_demo", "staging", "not_null_stg_orders_order_id"], "alias": "not_null_stg_orders_order_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1706724325.832438, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_orders", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.datafold_demo.stg_orders"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "order_id", "file_key_name": "models.stg_orders", "attached_node": 
"model.datafold_demo.stg_orders"}, "test.datafold_demo.accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned.080fb20aad": {"test_metadata": {"name": "accepted_values", "kwargs": {"values": ["placed", "shipped", "completed", "return_pending", "returned"], "column_name": "status", "model": "{{ get_where_subquery(ref('stg_orders')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned", "resource_type": "test", "package_name": "datafold_demo", "path": "accepted_values_stg_orders_4f514bf94b77b7ea437830eec4421c58.sql", "original_file_path": "models/staging/schema.yml", "unique_id": "test.datafold_demo.accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned.080fb20aad", "fqn": ["datafold_demo", "staging", "accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned"], "alias": "accepted_values_stg_orders_4f514bf94b77b7ea437830eec4421c58", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": "accepted_values_stg_orders_4f514bf94b77b7ea437830eec4421c58", "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"alias": "accepted_values_stg_orders_4f514bf94b77b7ea437830eec4421c58"}, "created_at": 1706724325.833625, "relation_name": null, "raw_code": "{{ test_accepted_values(**_dbt_generic_test_kwargs) }}{{ config(alias=\"accepted_values_stg_orders_4f514bf94b77b7ea437830eec4421c58\") }}", "language": "sql", "refs": [{"name": "stg_orders", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_accepted_values", "macro.dbt.get_where_subquery"], "nodes": ["model.datafold_demo.stg_orders"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "status", "file_key_name": "models.stg_orders", "attached_node": "model.datafold_demo.stg_orders"}, "test.datafold_demo.unique_stg_payments_payment_id.3744510712": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "payment_id", "model": "{{ get_where_subquery(ref('stg_payments')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "unique_stg_payments_payment_id", "resource_type": "test", "package_name": "datafold_demo", "path": "unique_stg_payments_payment_id.sql", "original_file_path": "models/staging/schema.yml", "unique_id": "test.datafold_demo.unique_stg_payments_payment_id.3744510712", "fqn": ["datafold_demo", "staging", "unique_stg_payments_payment_id"], "alias": "unique_stg_payments_payment_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": 
{"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1706724325.841217, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_payments", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique"], "nodes": ["model.datafold_demo.stg_payments"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "payment_id", "file_key_name": "models.stg_payments", "attached_node": "model.datafold_demo.stg_payments"}, "test.datafold_demo.not_null_stg_payments_payment_id.c19cc50075": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "payment_id", "model": "{{ get_where_subquery(ref('stg_payments')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "not_null_stg_payments_payment_id", "resource_type": "test", "package_name": "datafold_demo", "path": "not_null_stg_payments_payment_id.sql", "original_file_path": "models/staging/schema.yml", "unique_id": "test.datafold_demo.not_null_stg_payments_payment_id.c19cc50075", "fqn": ["datafold_demo", "staging", "not_null_stg_payments_payment_id"], "alias": "not_null_stg_payments_payment_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1706724325.842358, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_payments", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.datafold_demo.stg_payments"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "payment_id", "file_key_name": "models.stg_payments", "attached_node": "model.datafold_demo.stg_payments"}, "test.datafold_demo.accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card.3c3820f278": {"test_metadata": {"name": "accepted_values", "kwargs": {"values": ["credit_card", "coupon", "bank_transfer", "gift_card"], "column_name": "payment_method", "model": "{{ get_where_subquery(ref('stg_payments')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card", "resource_type": "test", "package_name": "datafold_demo", "path": "accepted_values_stg_payments_c7909fb19b1f0177c2bf99c7912f06ef.sql", "original_file_path": "models/staging/schema.yml", "unique_id": "test.datafold_demo.accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card.3c3820f278", "fqn": ["datafold_demo", "staging", "accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card"], "alias": "accepted_values_stg_payments_c7909fb19b1f0177c2bf99c7912f06ef", "checksum": {"name": 
"none", "checksum": ""}, "config": {"enabled": true, "alias": "accepted_values_stg_payments_c7909fb19b1f0177c2bf99c7912f06ef", "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"alias": "accepted_values_stg_payments_c7909fb19b1f0177c2bf99c7912f06ef"}, "created_at": 1706724325.8435452, "relation_name": null, "raw_code": "{{ test_accepted_values(**_dbt_generic_test_kwargs) }}{{ config(alias=\"accepted_values_stg_payments_c7909fb19b1f0177c2bf99c7912f06ef\") }}", "language": "sql", "refs": [{"name": "stg_payments", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_accepted_values", "macro.dbt.get_where_subquery"], "nodes": ["model.datafold_demo.stg_payments"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "payment_method", "file_key_name": "models.stg_payments", "attached_node": "model.datafold_demo.stg_payments"}, "test.datafold_demo.unique_customers_customer_id.c5af1ff4b1": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "customer_id", "model": "{{ get_where_subquery(ref('customers')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "unique_customers_customer_id", "resource_type": "test", "package_name": "datafold_demo", "path": "unique_customers_customer_id.sql", "original_file_path": "models/core/schema.yml", "unique_id": "test.datafold_demo.unique_customers_customer_id.c5af1ff4b1", "fqn": ["datafold_demo", "core", "unique_customers_customer_id"], "alias": "unique_customers_customer_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1706724325.851627, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "customers", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique"], "nodes": ["model.datafold_demo.customers"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "customer_id", "file_key_name": "models.customers", "attached_node": "model.datafold_demo.customers"}, "test.datafold_demo.not_null_customers_customer_id.5c9bf9911d": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "customer_id", "model": "{{ get_where_subquery(ref('customers')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "not_null_customers_customer_id", "resource_type": "test", "package_name": "datafold_demo", "path": "not_null_customers_customer_id.sql", 
"original_file_path": "models/core/schema.yml", "unique_id": "test.datafold_demo.not_null_customers_customer_id.5c9bf9911d", "fqn": ["datafold_demo", "core", "not_null_customers_customer_id"], "alias": "not_null_customers_customer_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1706724325.852818, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "customers", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.datafold_demo.customers"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "customer_id", "file_key_name": "models.customers", "attached_node": "model.datafold_demo.customers"}, "test.datafold_demo.unique_orders_order_id.fed79b3a6e": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "order_id", "model": "{{ get_where_subquery(ref('orders')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "unique_orders_order_id", "resource_type": "test", "package_name": "datafold_demo", "path": "unique_orders_order_id.sql", "original_file_path": "models/core/schema.yml", "unique_id": "test.datafold_demo.unique_orders_order_id.fed79b3a6e", "fqn": ["datafold_demo", "core", "unique_orders_order_id"], "alias": "unique_orders_order_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1706724325.853958, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "orders", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique"], "nodes": ["model.datafold_demo.orders"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "order_id", "file_key_name": "models.orders", "attached_node": "model.datafold_demo.orders"}, "test.datafold_demo.not_null_orders_order_id.cf6c17daed": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "order_id", "model": "{{ get_where_subquery(ref('orders')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "not_null_orders_order_id", "resource_type": "test", "package_name": "datafold_demo", "path": "not_null_orders_order_id.sql", "original_file_path": "models/core/schema.yml", "unique_id": 
"test.datafold_demo.not_null_orders_order_id.cf6c17daed", "fqn": ["datafold_demo", "core", "not_null_orders_order_id"], "alias": "not_null_orders_order_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1706724325.855113, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "orders", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.datafold_demo.orders"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "order_id", "file_key_name": "models.orders", "attached_node": "model.datafold_demo.orders"}, "test.datafold_demo.not_null_orders_customer_id.c5f02694af": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "customer_id", "model": "{{ get_where_subquery(ref('orders')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "not_null_orders_customer_id", "resource_type": "test", "package_name": "datafold_demo", "path": "not_null_orders_customer_id.sql", "original_file_path": "models/core/schema.yml", "unique_id": "test.datafold_demo.not_null_orders_customer_id.c5f02694af", "fqn": ["datafold_demo", "core", "not_null_orders_customer_id"], "alias": "not_null_orders_customer_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1706724325.856253, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "orders", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.datafold_demo.orders"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "customer_id", "file_key_name": "models.orders", "attached_node": "model.datafold_demo.orders"}, "test.datafold_demo.accepted_values_orders_status__placed__shipped__completed__return_pending__returned.be6b5b5ec3": {"test_metadata": {"name": "accepted_values", "kwargs": {"values": ["placed", "shipped", "completed", "return_pending", "returned"], "column_name": "status", "model": "{{ get_where_subquery(ref('orders')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "accepted_values_orders_status__placed__shipped__completed__return_pending__returned", "resource_type": "test", "package_name": 
"datafold_demo", "path": "accepted_values_orders_1ce6ab157c285f7cd2ac656013faf758.sql", "original_file_path": "models/core/schema.yml", "unique_id": "test.datafold_demo.accepted_values_orders_status__placed__shipped__completed__return_pending__returned.be6b5b5ec3", "fqn": ["datafold_demo", "core", "accepted_values_orders_status__placed__shipped__completed__return_pending__returned"], "alias": "accepted_values_orders_1ce6ab157c285f7cd2ac656013faf758", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": "accepted_values_orders_1ce6ab157c285f7cd2ac656013faf758", "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"alias": "accepted_values_orders_1ce6ab157c285f7cd2ac656013faf758"}, "created_at": 1706724325.857507, "relation_name": null, "raw_code": "{{ test_accepted_values(**_dbt_generic_test_kwargs) }}{{ config(alias=\"accepted_values_orders_1ce6ab157c285f7cd2ac656013faf758\") }}", "language": "sql", "refs": [{"name": "orders", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_accepted_values", "macro.dbt.get_where_subquery"], "nodes": ["model.datafold_demo.orders"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "status", "file_key_name": "models.orders", "attached_node": "model.datafold_demo.orders"}, "test.datafold_demo.not_null_orders_amount.106140f9fd": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "amount", "model": "{{ get_where_subquery(ref('orders')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "not_null_orders_amount", "resource_type": "test", "package_name": "datafold_demo", "path": "not_null_orders_amount.sql", "original_file_path": "models/core/schema.yml", "unique_id": "test.datafold_demo.not_null_orders_amount.106140f9fd", "fqn": ["datafold_demo", "core", "not_null_orders_amount"], "alias": "not_null_orders_amount", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1706724325.86013, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "orders", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.datafold_demo.orders"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "amount", "file_key_name": "models.orders", "attached_node": "model.datafold_demo.orders"}, 
"test.datafold_demo.not_null_orders_credit_card_amount.d3ca593b59": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "credit_card_amount", "model": "{{ get_where_subquery(ref('orders')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "not_null_orders_credit_card_amount", "resource_type": "test", "package_name": "datafold_demo", "path": "not_null_orders_credit_card_amount.sql", "original_file_path": "models/core/schema.yml", "unique_id": "test.datafold_demo.not_null_orders_credit_card_amount.d3ca593b59", "fqn": ["datafold_demo", "core", "not_null_orders_credit_card_amount"], "alias": "not_null_orders_credit_card_amount", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1706724325.861248, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "orders", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.datafold_demo.orders"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "credit_card_amount", "file_key_name": "models.orders", "attached_node": "model.datafold_demo.orders"}, "test.datafold_demo.not_null_orders_coupon_amount.ab90c90625": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "coupon_amount", "model": "{{ get_where_subquery(ref('orders')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "not_null_orders_coupon_amount", "resource_type": "test", "package_name": "datafold_demo", "path": "not_null_orders_coupon_amount.sql", "original_file_path": "models/core/schema.yml", "unique_id": "test.datafold_demo.not_null_orders_coupon_amount.ab90c90625", "fqn": ["datafold_demo", "core", "not_null_orders_coupon_amount"], "alias": "not_null_orders_coupon_amount", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1706724325.8625238, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "orders", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.datafold_demo.orders"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "coupon_amount", "file_key_name": "models.orders", "attached_node": 
"model.datafold_demo.orders"}, "test.datafold_demo.not_null_orders_bank_transfer_amount.7743500c49": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "bank_transfer_amount", "model": "{{ get_where_subquery(ref('orders')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "not_null_orders_bank_transfer_amount", "resource_type": "test", "package_name": "datafold_demo", "path": "not_null_orders_bank_transfer_amount.sql", "original_file_path": "models/core/schema.yml", "unique_id": "test.datafold_demo.not_null_orders_bank_transfer_amount.7743500c49", "fqn": ["datafold_demo", "core", "not_null_orders_bank_transfer_amount"], "alias": "not_null_orders_bank_transfer_amount", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1706724325.863639, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "orders", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.datafold_demo.orders"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "bank_transfer_amount", "file_key_name": "models.orders", "attached_node": "model.datafold_demo.orders"}, "test.datafold_demo.not_null_orders_gift_card_amount.413a0d2d7a": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "gift_card_amount", "model": "{{ get_where_subquery(ref('orders')) }}"}, "namespace": null}, "database": "jaffle_shop", "schema": "dev_dbt_test__audit", "name": "not_null_orders_gift_card_amount", "resource_type": "test", "package_name": "datafold_demo", "path": "not_null_orders_gift_card_amount.sql", "original_file_path": "models/core/schema.yml", "unique_id": "test.datafold_demo.not_null_orders_gift_card_amount.413a0d2d7a", "fqn": ["datafold_demo", "core", "not_null_orders_gift_card_amount"], "alias": "not_null_orders_gift_card_amount", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1706724325.864759, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "orders", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.datafold_demo.orders"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "gift_card_amount", 
"file_key_name": "models.orders", "attached_node": "model.datafold_demo.orders"}}, "sources": {}, "macros": {"macro.dbt_duckdb.duckdb__get_binding_char": {"name": "duckdb__get_binding_char", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/seed.sql", "original_file_path": "macros/seed.sql", "unique_id": "macro.dbt_duckdb.duckdb__get_binding_char", "macro_sql": "{% macro duckdb__get_binding_char() %}\n {{ return(adapter.get_binding_char()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.2156742, "supported_languages": null}, "macro.dbt_duckdb.duckdb__get_batch_size": {"name": "duckdb__get_batch_size", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/seed.sql", "original_file_path": "macros/seed.sql", "unique_id": "macro.dbt_duckdb.duckdb__get_batch_size", "macro_sql": "{% macro duckdb__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.215832, "supported_languages": null}, "macro.dbt_duckdb.duckdb__load_csv_rows": {"name": "duckdb__load_csv_rows", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/seed.sql", "original_file_path": "macros/seed.sql", "unique_id": "macro.dbt_duckdb.duckdb__load_csv_rows", "macro_sql": "{% macro duckdb__load_csv_rows(model, agate_table) %}\n {% if config.get('fast', true) %}\n {% set seed_file_path = adapter.get_seed_file_path(model) %}\n {% set delimiter = config.get('delimiter', ',') %}\n {% set sql %}\n COPY {{ this.render() }} FROM '{{ seed_file_path }}' (FORMAT CSV, HEADER TRUE, DELIMITER '{{ delimiter }}')\n {% endset %}\n {% do adapter.add_query(sql, abridge_sql_log=True) %}\n {{ return(sql) }}\n {% endif %}\n\n {% set batch_size = get_batch_size() %}\n {% set agate_table = adapter.convert_datetimes_to_strs(agate_table) %}\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.217742, "supported_languages": null}, "macro.dbt_duckdb.duckdb__snapshot_merge_sql": {"name": "duckdb__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/snapshot_helper.sql", "original_file_path": "macros/snapshot_helper.sql", "unique_id": "macro.dbt_duckdb.duckdb__snapshot_merge_sql", 
"macro_sql": "{% macro duckdb__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }} as DBT_INTERNAL_TARGET\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = DBT_INTERNAL_TARGET.dbt_scd_id::text\n and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and DBT_INTERNAL_TARGET.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.218645, "supported_languages": null}, "macro.dbt_duckdb.build_snapshot_staging_table": {"name": "build_snapshot_staging_table", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/snapshot_helper.sql", "original_file_path": "macros/snapshot_helper.sql", "unique_id": "macro.dbt_duckdb.build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set temp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(False, temp_relation, select) }}\n {% endcall %}\n\n {% do return(temp_relation) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.219084, "supported_languages": null}, "macro.dbt_duckdb.duckdb__post_snapshot": {"name": "duckdb__post_snapshot", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/snapshot_helper.sql", "original_file_path": "macros/snapshot_helper.sql", "unique_id": "macro.dbt_duckdb.duckdb__post_snapshot", "macro_sql": "{% macro duckdb__post_snapshot(staging_relation) %}\n {% do return(drop_relation(staging_relation)) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.219243, "supported_languages": null}, "macro.dbt_duckdb.duckdb__get_catalog": {"name": "duckdb__get_catalog", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_duckdb.duckdb__get_catalog", "macro_sql": "{% macro duckdb__get_catalog(information_schema, schemas) -%}\n {%- call statement('catalog', fetch_result=True) -%}\n select\n '{{ database }}' as table_database,\n t.table_schema,\n t.table_name,\n t.table_type,\n '' as table_comment,\n c.column_name,\n c.ordinal_position as column_index,\n c.data_type column_type,\n '' as column_comment,\n '' as table_owner\n FROM information_schema.tables t JOIN information_schema.columns c ON t.table_schema = c.table_schema AND t.table_name = c.table_name\n WHERE (\n {%- for schema in schemas -%}\n upper(t.table_schema) = upper('{{ schema 
}}'){%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n AND t.table_type IN ('BASE TABLE', 'VIEW')\n ORDER BY\n t.table_schema,\n t.table_name,\n c.ordinal_position\n {%- endcall -%}\n {{ return(load_result('catalog').table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.219906, "supported_languages": null}, "macro.dbt_duckdb.duckdb__create_schema": {"name": "duckdb__create_schema", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__create_schema", "macro_sql": "{% macro duckdb__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.2280838, "supported_languages": null}, "macro.dbt_duckdb.duckdb__drop_schema": {"name": "duckdb__drop_schema", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__drop_schema", "macro_sql": "{% macro duckdb__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.22829, "supported_languages": null}, "macro.dbt_duckdb.duckdb__list_schemas": {"name": "duckdb__list_schemas", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__list_schemas", "macro_sql": "{% macro duckdb__list_schemas(database) -%}\n {% set sql %}\n select schema_name\n from system.information_schema.schemata\n {% if database is not none %}\n where catalog_name = '{{ database }}'\n {% endif %}\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.228587, "supported_languages": null}, "macro.dbt_duckdb.duckdb__check_schema_exists": {"name": "duckdb__check_schema_exists", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__check_schema_exists", "macro_sql": "{% macro duckdb__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from system.information_schema.schemata\n where schema_name = '{{ schema }}'\n and catalog_name = '{{ information_schema.database }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.228841, "supported_languages": null}, "macro.dbt_duckdb.get_column_names": {"name": "get_column_names", "resource_type": "macro", 
"package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.get_column_names", "macro_sql": "{% macro get_column_names() %}\n {# loop through user_provided_columns to get column names #}\n {%- set user_provided_columns = model['columns'] -%}\n (\n {% for i in user_provided_columns %}\n {% set col = user_provided_columns[i] %}\n {{ col['name'] }} {{ \",\" if not loop.last }}\n {% endfor %}\n )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.2292428, "supported_languages": null}, "macro.dbt_duckdb.duckdb__create_table_as": {"name": "duckdb__create_table_as", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__create_table_as", "macro_sql": "{% macro duckdb__create_table_as(temporary, relation, compiled_code, language='sql') -%}\n {%- if language == 'sql' -%}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(compiled_code) }}\n {% endif %}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n {% if contract_config.enforced and not temporary %}\n {#-- DuckDB doesnt support constraints on temp tables --#}\n {{ get_table_columns_and_constraints() }} ;\n insert into {{ relation }} {{ get_column_names() }} (\n {{ get_select_subquery(compiled_code) }}\n );\n {% else %}\n as (\n {{ compiled_code }}\n );\n {% endif %}\n {%- elif language == 'python' -%}\n {{ py_write_table(temporary=temporary, relation=relation, compiled_code=compiled_code) }}\n {%- else -%}\n {% do exceptions.raise_compiler_error(\"duckdb__create_table_as macro didn't get supported language, it got %s\" % language) %}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt_duckdb.get_column_names", "macro.dbt.get_select_subquery", "macro.dbt_duckdb.py_write_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.230325, "supported_languages": null}, "macro.dbt_duckdb.py_write_table": {"name": "py_write_table", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.py_write_table", "macro_sql": "{% macro py_write_table(temporary, relation, compiled_code) -%}\n{{ compiled_code }}\n\ndef materialize(df, con):\n try:\n import pyarrow\n pyarrow_available = True\n except ImportError:\n pyarrow_available = False\n finally:\n if pyarrow_available and isinstance(df, pyarrow.Table):\n # https://github.com/duckdb/duckdb/issues/6584\n import pyarrow.dataset\n con.execute('create table {{ relation }} as select * from df')\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.230502, "supported_languages": null}, "macro.dbt_duckdb.duckdb__create_view_as": {"name": "duckdb__create_view_as", "resource_type": "macro", "package_name": 
"dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__create_view_as", "macro_sql": "{% macro duckdb__create_view_as(relation, sql) -%}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }} as (\n {{ sql }}\n );\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.230922, "supported_languages": null}, "macro.dbt_duckdb.duckdb__get_columns_in_relation": {"name": "duckdb__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__get_columns_in_relation", "macro_sql": "{% macro duckdb__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from system.information_schema.columns\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n {% if relation.database %}\n and table_catalog = '{{ relation.database }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.2314408, "supported_languages": null}, "macro.dbt_duckdb.duckdb__list_relations_without_caching": {"name": "duckdb__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__list_relations_without_caching", "macro_sql": "{% macro duckdb__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n table_name as name,\n table_schema as schema,\n CASE table_type\n WHEN 'BASE TABLE' THEN 'table'\n WHEN 'VIEW' THEN 'view'\n WHEN 'LOCAL TEMPORARY' THEN 'table'\n END as type\n from system.information_schema.tables\n where table_schema = '{{ schema_relation.schema }}'\n and table_catalog = '{{ schema_relation.database }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.231773, "supported_languages": null}, "macro.dbt_duckdb.duckdb__drop_relation": {"name": "duckdb__drop_relation", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__drop_relation", "macro_sql": "{% macro duckdb__drop_relation(relation) -%}\n {% call 
statement('drop_relation', auto_begin=False) -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.231991, "supported_languages": null}, "macro.dbt_duckdb.duckdb__rename_relation": {"name": "duckdb__rename_relation", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__rename_relation", "macro_sql": "{% macro duckdb__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter {{ to_relation.type }} {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.232312, "supported_languages": null}, "macro.dbt_duckdb.duckdb__make_temp_relation": {"name": "duckdb__make_temp_relation", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__make_temp_relation", "macro_sql": "{% macro duckdb__make_temp_relation(base_relation, suffix) %}\n {% set tmp_identifier = base_relation.identifier ~ suffix ~ py_current_timestring() %}\n {% do return(base_relation.incorporate(\n path={\n \"identifier\": tmp_identifier,\n \"schema\": none,\n \"database\": none\n })) -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.py_current_timestring"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.232762, "supported_languages": null}, "macro.dbt_duckdb.duckdb__current_timestamp": {"name": "duckdb__current_timestamp", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__current_timestamp", "macro_sql": "{% macro duckdb__current_timestamp() -%}\n now()\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.232842, "supported_languages": null}, "macro.dbt_duckdb.duckdb__snapshot_string_as_time": {"name": "duckdb__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__snapshot_string_as_time", "macro_sql": "{% macro duckdb__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp\" -%}\n {{ return(result) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.2330172, "supported_languages": null}, "macro.dbt_duckdb.duckdb__snapshot_get_time": {"name": "duckdb__snapshot_get_time", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__snapshot_get_time", "macro_sql": "{% 
macro duckdb__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.233113, "supported_languages": null}, "macro.dbt_duckdb.duckdb__get_incremental_default_sql": {"name": "duckdb__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.duckdb__get_incremental_default_sql", "macro_sql": "{% macro duckdb__get_incremental_default_sql(arg_dict) %}\n {% do return(get_incremental_delete_insert_sql(arg_dict)) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_delete_insert_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.233261, "supported_languages": null}, "macro.dbt_duckdb.location_exists": {"name": "location_exists", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.location_exists", "macro_sql": "{% macro location_exists(location) -%}\n {% do return(adapter.location_exists(location)) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.233422, "supported_languages": null}, "macro.dbt_duckdb.write_to_file": {"name": "write_to_file", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.write_to_file", "macro_sql": "{% macro write_to_file(relation, location, options) -%}\n {% call statement('write_to_file') -%}\n copy {{ relation }} to '{{ location }}' ({{ options }})\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.233629, "supported_languages": null}, "macro.dbt_duckdb.store_relation": {"name": "store_relation", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.store_relation", "macro_sql": "{% macro store_relation(plugin, relation, location, format, config) -%}\n {%- set column_list = adapter.get_columns_in_relation(relation) -%}\n {% do adapter.store_relation(plugin, relation, column_list, location, format, config) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.234048, "supported_languages": null}, "macro.dbt_duckdb.render_write_options": {"name": "render_write_options", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_duckdb.render_write_options", "macro_sql": "{% macro render_write_options(config) -%}\n {% set options = config.get('options', {}) %}\n {% if options is not mapping %}\n {% do exceptions.raise_compiler_error(\"The options argument must be a dictionary\") %}\n {% endif %}\n\n {% for k in options %}\n {% set _ = options.update({k: 
render(options[k])}) %}\n {% endfor %}\n\n {# legacy top-level write options #}\n {% if config.get('format') %}\n {% set _ = options.update({'format': render(config.get('format'))}) %}\n {% endif %}\n {% if config.get('delimiter') %}\n {% set _ = options.update({'delimiter': render(config.get('delimiter'))}) %}\n {% endif %}\n\n {% do return(options) %}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.23509, "supported_languages": null}, "macro.dbt_duckdb.duckdb__get_delete_insert_merge_sql": {"name": "duckdb__get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/incremental_helper.sql", "original_file_path": "macros/incremental_helper.sql", "unique_id": "macro.dbt_duckdb.duckdb__get_delete_insert_merge_sql", "macro_sql": "{% macro duckdb__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not string %}\n delete from {{target }} as DBT_INCREMENTAL_TARGET\n using {{ source }}\n where (\n {% for key in unique_key %}\n {{ source }}.{{ key }} = DBT_INCREMENTAL_TARGET.{{ key }}\n {{ \"and \" if not loop.last}}\n {% endfor %}\n {% if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {% endif %}\n );\n {% else %}\n delete from {{ target }}\n where (\n {{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n )\n {%- if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {%- endif -%};\n\n {% endif %}\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.237298, "supported_languages": null}, "macro.dbt_duckdb.duckdb__alter_relation_add_remove_columns": {"name": "duckdb__alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/columns.sql", "original_file_path": "macros/columns.sql", "unique_id": "macro.dbt_duckdb.duckdb__alter_relation_add_remove_columns", "macro_sql": "{% macro duckdb__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns %}\n {% for column in add_columns %}\n {% set sql -%}\n alter {{ relation.type }} {{ relation }} add column\n {{ column.name }} {{ column.data_type }}\n {%- endset -%}\n {% do run_query(sql) %}\n {% endfor %}\n {% endif %}\n\n {% if remove_columns %}\n {% for column in remove_columns %}\n {% set sql -%}\n alter {{ relation.type }} {{ relation }} drop column\n {{ column.name }}\n {%- endset -%}\n {% do run_query(sql) %}\n {% endfor %}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.238336, "supported_languages": null}, "macro.dbt_duckdb.materialization_table_duckdb": {"name": "materialization_table_duckdb", "resource_type": "macro", "package_name": "dbt_duckdb", "path": 
"macros/materializations/table.sql", "original_file_path": "macros/materializations/table.sql", "unique_id": "macro.dbt_duckdb.materialization_table_duckdb", "macro_sql": "{% materialization table, adapter=\"duckdb\", supported_languages=['sql', 'python'] %}\n\n {%- set language = model['language'] -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main', language=language) -%}\n {{- create_table_as(False, intermediate_relation, compiled_code, language) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.create_indexes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.241374, "supported_languages": ["sql", "python"]}, "macro.dbt_duckdb.materialization_external_duckdb": {"name": "materialization_external_duckdb", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/materializations/external.sql", "original_file_path": "macros/materializations/external.sql", "unique_id": 
"macro.dbt_duckdb.materialization_external_duckdb", "macro_sql": "{% materialization external, adapter=\"duckdb\", supported_languages=['sql', 'python'] %}\n\n {%- set location = render(config.get('location', default=external_location(this, config))) -%})\n {%- set rendered_options = render_write_options(config) -%}\n {%- set format = config.get('format', 'parquet') -%}\n {%- set write_options = adapter.external_write_options(location, rendered_options) -%}\n {%- set read_location = adapter.external_read_location(location, rendered_options) -%}\n\n -- set language - python or sql\n {%- set language = model['language'] -%}\n\n {%- set target_relation = this.incorporate(type='view') %}\n\n -- Continue as normal materialization\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set temp_relation = make_intermediate_relation(this.incorporate(type='table'), suffix='__dbt_tmp') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation, suffix='__dbt_int') -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_temp_relation = load_cached_relation(temp_relation) -%}\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_temp_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('create_table', language=language) -%}\n {{- create_table_as(False, temp_relation, compiled_code, language) }}\n {%- endcall %}\n\n -- write an temp relation into file\n {{ write_to_file(temp_relation, location, write_options) }}\n -- create a view on top of the location\n {% call statement('main', language='sql') -%}\n create or replace view {{ intermediate_relation }} as (\n select * from '{{ read_location }}'\n );\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n {{ drop_relation_if_exists(temp_relation) }}\n\n -- register table into glue\n 
{%- set plugin_name = config.get('plugin') -%}\n {%- set glue_register = config.get('glue_register', default=false) -%}\n {% if plugin_name is not none or glue_register is true %}\n {% if glue_register %}\n {# legacy hack to set the glue database name, deprecate this #}\n {%- set plugin_name = 'glue|' ~ config.get('glue_database', 'default') -%}\n {% endif %}\n {% do store_relation(plugin_name, target_relation, location, format, config) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt_duckdb.external_location", "macro.dbt_duckdb.render_write_options", "macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt_duckdb.write_to_file", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt_duckdb.store_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.246537, "supported_languages": ["sql", "python"]}, "macro.dbt_duckdb.materialization_incremental_duckdb": {"name": "materialization_incremental_duckdb", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/materializations/incremental.sql", "original_file_path": "macros/materializations/incremental.sql", "unique_id": "macro.dbt_duckdb.materialization_incremental_duckdb", "macro_sql": "{% materialization incremental, adapter=\"duckdb\", supported_languages=['sql', 'python'] -%}\n\n {%- set language = model['language'] -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. 
This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = create_table_as(False, target_relation, compiled_code, language) %}\n {% elif full_refresh_mode %}\n {% set build_sql = create_table_as(False, intermediate_relation, compiled_code, language) %}\n {% set need_swap = true %}\n {% else %}\n {% if language == 'python' %}\n {% set build_python = create_table_as(False, temp_relation, compiled_code, language) %}\n {% call statement(\"pre\", language=language) %}\n {{- build_python }}\n {% endcall %}\n {% else %} {# SQL #}\n {% do run_query(create_table_as(True, temp_relation, compiled_code, language)) %}\n {% endif %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n {% set language = \"sql\" %}\n\n {% endif %}\n\n {% call statement(\"main\", language=language) %}\n {{- build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_temp_relation", 
"macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.create_table_as", "macro.dbt.statement", "macro.dbt.run_query", "macro.dbt.process_schema_changes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.252039, "supported_languages": ["sql", "python"]}, "macro.dbt_duckdb.duckdb__dateadd": {"name": "duckdb__dateadd", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt_duckdb.duckdb__dateadd", "macro_sql": "{% macro duckdb__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {{ from_date_or_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.252274, "supported_languages": null}, "macro.dbt_duckdb.duckdb__listagg": {"name": "duckdb__listagg", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt_duckdb.duckdb__listagg", "macro_sql": "{% macro duckdb__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n {% if limit_num -%}\n list_aggr(\n (array_agg(\n {{ measure }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n ))[1:{{ limit_num }}],\n 'string_agg',\n {{ delimiter_text }}\n )\n {%- else %}\n string_agg(\n {{ measure }},\n {{ delimiter_text }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n )\n {%- endif %}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.2529438, "supported_languages": null}, "macro.dbt_duckdb.duckdb__datediff": {"name": "duckdb__datediff", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt_duckdb.duckdb__datediff", "macro_sql": "{% macro duckdb__datediff(first_date, second_date, datepart) -%}\n\n {% if datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif datepart == 'month' %}\n ({{ datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ datediff(first_date, second_date, 'day') }} / 7 + case\n when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ datediff(first_date, second_date, 'day') }} * 24 + 
date_part('hour', ({{second_date}})::timestamp) - date_part('hour', ({{first_date}})::timestamp))\n {% elif datepart == 'minute' %}\n ({{ datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.256305, "supported_languages": null}, "macro.dbt_duckdb.duckdb__any_value": {"name": "duckdb__any_value", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt_duckdb.duckdb__any_value", "macro_sql": "{% macro duckdb__any_value(expression) -%}\n\n arbitrary({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.256445, "supported_languages": null}, "macro.dbt_duckdb.register_upstream_external_models": {"name": "register_upstream_external_models", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/utils/upstream.sql", "original_file_path": "macros/utils/upstream.sql", "unique_id": "macro.dbt_duckdb.register_upstream_external_models", "macro_sql": "{%- macro register_upstream_external_models() -%}\n{% if execute %}\n{% set upstream_nodes = {} %}\n{% set upstream_schemas = {} %}\n{% for node in selected_resources %}\n {% for upstream_node in graph['nodes'][node]['depends_on']['nodes'] %}\n {% if upstream_node not in upstream_nodes and upstream_node not in selected_resources %}\n {% do upstream_nodes.update({upstream_node: None}) %}\n {% set upstream = graph['nodes'].get(upstream_node) %}\n {% if upstream\n and upstream.resource_type in ('model', 'seed')\n and upstream.config.materialized=='external'\n %}\n {%- set upstream_rel = api.Relation.create(\n database=upstream['database'],\n schema=upstream['schema'],\n identifier=upstream['alias']\n ) -%}\n {%- set location = upstream.config.get('location', external_location(upstream_rel, upstream.config)) -%}\n {%- set rendered_options = render_write_options(upstream.config) -%}\n {%- set upstream_location = adapter.external_read_location(location, rendered_options) -%}\n {% if upstream_rel.schema not in upstream_schemas %}\n {% call statement('main', language='sql') -%}\n create schema if not exists {{ upstream_rel.schema }}\n {%- endcall %}\n {% do upstream_schemas.update({upstream_rel.schema: None}) %}\n {% endif %}\n {% call statement('main', language='sql') -%}\n create 
or replace view {{ upstream_rel }} as (\n select * from '{{ upstream_location }}'\n );\n {%- endcall %}\n {%- endif %}\n {% endif %}\n {% endfor %}\n{% endfor %}\n{% do adapter.commit() %}\n{% endif %}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_duckdb.external_location", "macro.dbt_duckdb.render_write_options", "macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.2589571, "supported_languages": null}, "macro.dbt_duckdb.duckdb__split_part": {"name": "duckdb__split_part", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/utils/splitpart.sql", "original_file_path": "macros/utils/splitpart.sql", "unique_id": "macro.dbt_duckdb.duckdb__split_part", "macro_sql": "{% macro duckdb__split_part(string_text, delimiter_text, part_number) %}\n string_split({{ string_text }}, {{ delimiter_text }})[ {{ part_number }} ]\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.2591739, "supported_languages": null}, "macro.dbt_duckdb.duckdb__last_day": {"name": "duckdb__last_day", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/utils/lastday.sql", "original_file_path": "macros/utils/lastday.sql", "unique_id": "macro.dbt_duckdb.duckdb__last_day", "macro_sql": "{% macro duckdb__last_day(date, datepart) -%}\n\n {%- if datepart == 'quarter' -%}\n -- duckdb dateadd does not support quarter interval.\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd('month', '3', dbt.date_trunc(datepart, date))\n )}}\n as date)\n {%- else -%}\n {{dbt.default_last_day(date, datepart)}}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.2596638, "supported_languages": null}, "macro.dbt_duckdb.external_location": {"name": "external_location", "resource_type": "macro", "package_name": "dbt_duckdb", "path": "macros/utils/external_location.sql", "original_file_path": "macros/utils/external_location.sql", "unique_id": "macro.dbt_duckdb.external_location", "macro_sql": "{%- macro external_location(relation, config) -%}\n {%- if config.get('options', {}).get('partition_by') is none -%}\n {%- set format = config.get('format', 'parquet') -%}\n {{- adapter.external_root() }}/{{ relation.identifier }}.{{ format }}\n {%- else -%}\n {{- adapter.external_root() }}/{{ relation.identifier }}\n {%- endif -%}\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.260224, "supported_languages": null}, "macro.dbt.run_hooks": {"name": "run_hooks", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 
%}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.261298, "supported_languages": null}, "macro.dbt.make_hook_config": {"name": "make_hook_config", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.261497, "supported_languages": null}, "macro.dbt.before_begin": {"name": "before_begin", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.261643, "supported_languages": null}, "macro.dbt.in_transaction": {"name": "in_transaction", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.261805, "supported_languages": null}, "macro.dbt.after_commit": {"name": "after_commit", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.261951, "supported_languages": null}, "macro.dbt.set_sql_header": {"name": "set_sql_header", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.262305, "supported_languages": null}, "macro.dbt.should_full_refresh": {"name": "should_full_refresh", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": 
"macros/materializations/configs.sql", "unique_id": "macro.dbt.should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.2626119, "supported_languages": null}, "macro.dbt.should_store_failures": {"name": "should_store_failures", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.262914, "supported_languages": null}, "macro.dbt.snapshot_merge_sql": {"name": "snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.263359, "supported_languages": null}, "macro.dbt.default__snapshot_merge_sql": {"name": "default__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.263625, "supported_languages": null}, "macro.dbt.strategy_dispatch": {"name": "strategy_dispatch", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.strategy_dispatch", "macro_sql": "{% 
macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.267334, "supported_languages": null}, "macro.dbt.snapshot_hash_arguments": {"name": "snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.267494, "supported_languages": null}, "macro.dbt.default__snapshot_hash_arguments": {"name": "default__snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.2677, "supported_languages": null}, "macro.dbt.snapshot_timestamp_strategy": {"name": "snapshot_timestamp_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. 
We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.268401, "supported_languages": null}, "macro.dbt.snapshot_string_as_time": {"name": "snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.268558, "supported_languages": null}, "macro.dbt.default__snapshot_string_as_time": {"name": "default__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.268734, "supported_languages": null}, "macro.dbt.snapshot_check_all_get_existing_columns": {"name": "snapshot_check_all_get_existing_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) -%}\n {%- if not target_exists -%}\n {#-- no table yet -> return whatever the query does --#}\n {{ return((false, query_columns)) }}\n {%- endif -%}\n\n {#-- handle any schema changes --#}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=node.alias) -%}\n\n {% if check_cols_config == 'all' %}\n {%- set query_columns = get_columns_in_query(node['compiled_code']) -%}\n\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {#-- query for proper casing/quoting, to support comparison below --#}\n {%- set select_check_cols_from_target -%}\n {#-- N.B. 
The whitespace below is necessary to avoid edge case issue with comments --#}\n {#-- See: https://github.com/dbt-labs/dbt-core/issues/6781 --#}\n select {{ check_cols_config | join(', ') }} from (\n {{ node['compiled_code'] }}\n ) subq\n {%- endset -%}\n {% set query_columns = get_columns_in_query(select_check_cols_from_target) %}\n\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set existing_cols = adapter.get_columns_in_relation(target_relation) | map(attribute = 'name') | list -%}\n {%- set ns = namespace() -%} {#-- handle for-loop scoping with a namespace --#}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(adapter.quote(col)) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return((ns.column_added, intersection)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.2701252, "supported_languages": null}, "macro.dbt.snapshot_check_strategy": {"name": "snapshot_check_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n {% set updated_at = config.get('updated_at', snapshot_get_time()) %}\n\n {% set column_added = false %}\n\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n {{ get_true_sql() }}\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.get_true_sql", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.271523, "supported_languages": null}, "macro.dbt.create_columns": {"name": "create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ 
adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.275705, "supported_languages": null}, "macro.dbt.default__create_columns": {"name": "default__create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.275985, "supported_languages": null}, "macro.dbt.post_snapshot": {"name": "post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.276147, "supported_languages": null}, "macro.dbt.default__post_snapshot": {"name": "default__post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.276243, "supported_languages": null}, "macro.dbt.get_true_sql": {"name": "get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.get_true_sql", "macro_sql": "{% macro get_true_sql() %}\n {{ adapter.dispatch('get_true_sql', 'dbt')() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_true_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.276404, "supported_languages": null}, "macro.dbt.default__get_true_sql": {"name": "default__get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__get_true_sql", "macro_sql": "{% macro default__get_true_sql() %}\n {{ return('TRUE') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.27652, 
"supported_languages": null}, "macro.dbt.snapshot_staging_table": {"name": "snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_staging_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.2767382, "supported_languages": null}, "macro.dbt.default__snapshot_staging_table": {"name": "default__snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__snapshot_staging_table", "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n\n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n\n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": 
["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.2776792, "supported_languages": null}, "macro.dbt.build_snapshot_table": {"name": "build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__build_snapshot_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.277874, "supported_languages": null}, "macro.dbt.default__build_snapshot_table": {"name": "default__build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__build_snapshot_table", "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.278126, "supported_languages": null}, "macro.dbt.build_snapshot_staging_table": {"name": "build_snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set temp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, temp_relation, select) }}\n {% endcall %}\n\n {% do return(temp_relation) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.278594, "supported_languages": null}, "macro.dbt.materialization_snapshot_default": {"name": "materialization_snapshot_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "unique_id": "macro.dbt.materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set 
target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.285076, "supported_languages": ["sql"]}, "macro.dbt.materialization_test_default": {"name": "materialization_test_default", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/materializations/tests/test.sql", "original_file_path": "macros/materializations/tests/test.sql", "unique_id": "macro.dbt.materialization_test_default", "macro_sql": "{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% set store_failures_as = config.get('store_failures_as') %}\n -- if `--store-failures` is invoked via command line and `store_failures_as` is not set,\n -- config.get('store_failures_as', 'table') returns None, not 'table'\n {% if store_failures_as == none %}{% set store_failures_as = 'table' %}{% endif %}\n {% if store_failures_as not in ['table', 'view'] %}\n {{ exceptions.raise_compiler_error(\n \"'\" ~ store_failures_as ~ \"' is not a valid value for `store_failures_as`. \"\n \"Accepted values are: ['ephemeral', 'table', 'view']\"\n ) }}\n {% endif %}\n\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type=store_failures_as) -%} %}\n\n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n {% call statement(auto_begin=True) %}\n {{ get_create_sql(target_relation, sql) }}\n {% endcall %}\n\n {% do relations.append(target_relation) %}\n\n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n\n {{ adapter.commit() }}\n\n {% else %}\n\n {% set main_sql = sql %}\n\n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.get_create_sql", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.287831, "supported_languages": ["sql"]}, "macro.dbt.get_test_sql": {"name": "get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.288308, "supported_languages": null}, "macro.dbt.default__get_test_sql": {"name": "default__get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if 
limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.28864, "supported_languages": null}, "macro.dbt.get_where_subquery": {"name": "get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.289033, "supported_languages": null}, "macro.dbt.default__get_where_subquery": {"name": "default__get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.default__get_where_subquery", "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.2894359, "supported_languages": null}, "macro.dbt.materialization_materialized_view_default": {"name": "materialization_materialized_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialization_materialized_view_default", "macro_sql": "{% materialization materialized_view, default %}\n {% set existing_relation = load_cached_relation(this) %}\n {% set target_relation = this.incorporate(type=this.MaterializedView) %}\n {% set intermediate_relation = make_intermediate_relation(target_relation) %}\n {% set backup_relation_type = target_relation.MaterializedView if existing_relation is none else existing_relation.type %}\n {% set backup_relation = make_backup_relation(target_relation, backup_relation_type) %}\n\n {{ materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) }}\n\n {% set build_sql = materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% if build_sql == '' %}\n {{ materialized_view_execute_no_op(target_relation) }}\n {% else %}\n {{ materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) }}\n {% endif %}\n\n {{ materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.materialized_view_setup", "macro.dbt.materialized_view_get_build_sql", "macro.dbt.materialized_view_execute_no_op", 
"macro.dbt.materialized_view_execute_build_sql", "macro.dbt.materialized_view_teardown"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.294364, "supported_languages": ["sql"]}, "macro.dbt.materialized_view_setup": {"name": "materialized_view_setup", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_setup", "macro_sql": "{% macro materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) %}\n\n -- backup_relation and intermediate_relation should not already exist in the database\n -- it's possible these exist because of a previous run that exited unexpectedly\n {% set preexisting_backup_relation = load_cached_relation(backup_relation) %}\n {% set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.2947588, "supported_languages": null}, "macro.dbt.materialized_view_teardown": {"name": "materialized_view_teardown", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_teardown", "macro_sql": "{% macro materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) %}\n\n -- drop the temp relations if they exist to leave the database clean for the next run\n {{ drop_relation_if_exists(backup_relation) }}\n {{ drop_relation_if_exists(intermediate_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.295027, "supported_languages": null}, "macro.dbt.materialized_view_get_build_sql": {"name": "materialized_view_get_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_get_build_sql", "macro_sql": "{% macro materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% set full_refresh_mode = should_full_refresh() %}\n\n -- determine the scenario we're in: create, full_refresh, alter, refresh data\n {% if existing_relation is none %}\n {% set build_sql = get_create_materialized_view_as_sql(target_relation, sql) %}\n {% elif full_refresh_mode or not existing_relation.is_materialized_view %}\n {% set build_sql = get_replace_sql(existing_relation, target_relation, sql) %}\n {% else %}\n\n -- get config options\n {% set on_configuration_change = 
config.get('on_configuration_change') %}\n {% set configuration_changes = get_materialized_view_configuration_changes(existing_relation, config) %}\n\n {% if configuration_changes is none %}\n {% set build_sql = refresh_materialized_view(target_relation) %}\n\n {% elif on_configuration_change == 'apply' %}\n {% set build_sql = get_alter_materialized_view_as_sql(target_relation, configuration_changes, sql, existing_relation, backup_relation, intermediate_relation) %}\n {% elif on_configuration_change == 'continue' %}\n {% set build_sql = '' %}\n {{ exceptions.warn(\"Configuration changes were identified and `on_configuration_change` was set to `continue` for `\" ~ target_relation ~ \"`\") }}\n {% elif on_configuration_change == 'fail' %}\n {{ exceptions.raise_fail_fast_error(\"Configuration changes were identified and `on_configuration_change` was set to `fail` for `\" ~ target_relation ~ \"`\") }}\n\n {% else %}\n -- this only happens if the user provides a value other than `apply`, 'skip', 'fail'\n {{ exceptions.raise_compiler_error(\"Unexpected configuration scenario\") }}\n\n {% endif %}\n\n {% endif %}\n\n {% do return(build_sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.get_create_materialized_view_as_sql", "macro.dbt.get_replace_sql", "macro.dbt.get_materialized_view_configuration_changes", "macro.dbt.refresh_materialized_view", "macro.dbt.get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.296279, "supported_languages": null}, "macro.dbt.materialized_view_execute_no_op": {"name": "materialized_view_execute_no_op", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_no_op", "macro_sql": "{% macro materialized_view_execute_no_op(target_relation) %}\n {% do store_raw_result(\n name=\"main\",\n message=\"skip \" ~ target_relation,\n code=\"skip\",\n rows_affected=\"-1\"\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.296524, "supported_languages": null}, "macro.dbt.materialized_view_execute_build_sql": {"name": "materialized_view_execute_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_build_sql", "macro_sql": "{% macro materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) %}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set grant_config = config.get('grants') %}\n\n {% call statement(name=\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.297158, "supported_languages": null}, "macro.dbt.materialization_view_default": {"name": "materialization_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view.sql", "original_file_path": "macros/materializations/models/view.sql", "unique_id": "macro.dbt.materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. 
*/\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.300074, "supported_languages": ["sql"]}, "macro.dbt.materialization_table_default": {"name": "materialization_table_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table.sql", "original_file_path": "macros/materializations/models/table.sql", "unique_id": "macro.dbt.materialization_table_default", "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. 
*/\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3028, "supported_languages": ["sql"]}, "macro.dbt.get_quoted_csv": {"name": "get_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.304387, "supported_languages": null}, "macro.dbt.diff_columns": {"name": "diff_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n\n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.304915, "supported_languages": null}, "macro.dbt.diff_column_data_types": {"name": "diff_column_data_types", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": 
"macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type and not sc.can_expand_to(other_column=tc) %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3055842, "supported_languages": null}, "macro.dbt.get_merge_update_columns": {"name": "get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_merge_update_columns", "macro_sql": "{% macro get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {{ return(adapter.dispatch('get_merge_update_columns', 'dbt')(merge_update_columns, merge_exclude_columns, dest_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.305813, "supported_languages": null}, "macro.dbt.default__get_merge_update_columns": {"name": "default__get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.default__get_merge_update_columns", "macro_sql": "{% macro default__get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {%- set default_cols = dest_columns | map(attribute=\"quoted\") | list -%}\n\n {%- if merge_update_columns and merge_exclude_columns -%}\n {{ exceptions.raise_compiler_error(\n 'Model cannot specify merge_update_columns and merge_exclude_columns. 
Please update model to use only one config'\n )}}\n {%- elif merge_update_columns -%}\n {%- set update_columns = merge_update_columns -%}\n {%- elif merge_exclude_columns -%}\n {%- set update_columns = [] -%}\n {%- for column in dest_columns -%}\n {% if column.column | lower not in merge_exclude_columns | map(\"lower\") | list %}\n {%- do update_columns.append(column.quoted) -%}\n {% endif %}\n {%- endfor -%}\n {%- else -%}\n {%- set update_columns = default_cols -%}\n {%- endif -%}\n\n {{ return(update_columns) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.306525, "supported_languages": null}, "macro.dbt.get_merge_sql": {"name": "get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n -- back compat for old kwarg name\n {% set incremental_predicates = kwargs.get('predicates', incremental_predicates) %}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3130748, "supported_languages": null}, "macro.dbt.default__get_merge_sql": {"name": "default__get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n {%- set predicates = [] if incremental_predicates is none else [] + incremental_predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set merge_update_columns = config.get('merge_update_columns') -%}\n {%- set merge_exclude_columns = config.get('merge_exclude_columns') -%}\n {%- set update_columns = get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %}\n {% for key in unique_key %}\n {% set this_key_match %}\n DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }}\n {% endset %}\n {% do predicates.append(this_key_match) %}\n {% endfor %}\n {% else %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% endif %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{\"(\" ~ predicates | join(\") and (\") ~ \")\"}}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n 
{% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv", "macro.dbt.get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.314634, "supported_languages": null}, "macro.dbt.get_delete_insert_merge_sql": {"name": "get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.314893, "supported_languages": null}, "macro.dbt.default__get_delete_insert_merge_sql": {"name": "default__get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not string %}\n delete from {{target }}\n using {{ source }}\n where (\n {% for key in unique_key %}\n {{ source }}.{{ key }} = {{ target }}.{{ key }}\n {{ \"and \" if not loop.last}}\n {% endfor %}\n {% if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {% endif %}\n );\n {% else %}\n delete from {{ target }}\n where (\n {{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n )\n {%- if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {%- endif -%};\n\n {% endif %}\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3158798, "supported_languages": null}, "macro.dbt.get_insert_overwrite_merge_sql": {"name": "get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "depends_on": {"macros": 
["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3161511, "supported_languages": null}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"name": "default__get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {#-- The only time include_sql_header is True: --#}\n {#-- BigQuery + insert_overwrite strategy + \"static\" partitions config --#}\n {#-- We should consider including the sql header at the materialization level instead --#}\n\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3167958, "supported_languages": null}, "macro.dbt.is_incremental": {"name": "is_incremental", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "unique_id": "macro.dbt.is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.31741, "supported_languages": null}, "macro.dbt.get_incremental_append_sql": {"name": "get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_append_sql", "macro_sql": "{% macro get_incremental_append_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_append_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3182871, "supported_languages": null}, 
"macro.dbt.default__get_incremental_append_sql": {"name": "default__get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_append_sql", "macro_sql": "{% macro default__get_incremental_append_sql(arg_dict) %}\n\n {% do return(get_insert_into_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_into_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3185332, "supported_languages": null}, "macro.dbt.get_incremental_delete_insert_sql": {"name": "get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_delete_insert_sql", "macro_sql": "{% macro get_incremental_delete_insert_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_delete_insert_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_delete_insert_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.31872, "supported_languages": null}, "macro.dbt.default__get_incremental_delete_insert_sql": {"name": "default__get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_delete_insert_sql", "macro_sql": "{% macro default__get_incremental_delete_insert_sql(arg_dict) %}\n\n {% do return(get_delete_insert_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.319013, "supported_languages": null}, "macro.dbt.get_incremental_merge_sql": {"name": "get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_merge_sql", "macro_sql": "{% macro get_incremental_merge_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_merge_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3192692, "supported_languages": null}, "macro.dbt.default__get_incremental_merge_sql": {"name": "default__get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": 
"macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_merge_sql", "macro_sql": "{% macro default__get_incremental_merge_sql(arg_dict) %}\n\n {% do return(get_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.319564, "supported_languages": null}, "macro.dbt.get_incremental_insert_overwrite_sql": {"name": "get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_insert_overwrite_sql", "macro_sql": "{% macro get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_insert_overwrite_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_insert_overwrite_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.319751, "supported_languages": null}, "macro.dbt.default__get_incremental_insert_overwrite_sql": {"name": "default__get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_insert_overwrite_sql", "macro_sql": "{% macro default__get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {% do return(get_insert_overwrite_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.320024, "supported_languages": null}, "macro.dbt.get_incremental_default_sql": {"name": "get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_default_sql", "macro_sql": "{% macro get_incremental_default_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_default_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__get_incremental_default_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3202062, "supported_languages": null}, "macro.dbt.default__get_incremental_default_sql": {"name": "default__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_default_sql", "macro_sql": "{% macro default__get_incremental_default_sql(arg_dict) 
%}\n\n {% do return(get_incremental_append_sql(arg_dict)) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.320364, "supported_languages": null}, "macro.dbt.get_insert_into_sql": {"name": "get_insert_into_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_insert_into_sql", "macro_sql": "{% macro get_insert_into_sql(target_relation, temp_relation, dest_columns) %}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ temp_relation }}\n )\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3206358, "supported_languages": null}, "macro.dbt.materialization_incremental_default": {"name": "materialization_incremental_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "unique_id": "macro.dbt.materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. 
This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_temp_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.get_create_table_as_sql", "macro.dbt.run_query", "macro.dbt.process_schema_changes", 
"macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3254771, "supported_languages": ["sql"]}, "macro.dbt.incremental_validate_on_schema_change": {"name": "incremental_validate_on_schema_change", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n\n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n\n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n\n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.331095, "supported_languages": null}, "macro.dbt.check_for_schema_changes": {"name": "check_for_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n\n {% set schema_changed = False %}\n\n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n\n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n\n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.332284, "supported_languages": null}, "macro.dbt.sync_column_schemas": {"name": "sync_column_schemas", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n\n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n\n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n\n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n\n {% do log(schema_change_message) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.33348, "supported_languages": null}, "macro.dbt.process_schema_changes": {"name": "process_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n\n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n\n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n\n {% if schema_changes_dict['schema_changed'] %}\n\n {% if on_schema_change == 'fail' %}\n\n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways:\n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n\n Additional troubleshooting context:\n Source columns not in target: {{ schema_changes_dict['source_not_in_target'] }}\n Target columns not in source: {{ schema_changes_dict['target_not_in_source'] }}\n New column types: {{ schema_changes_dict['new_target_types'] }}\n {% endset %}\n\n {% do exceptions.raise_compiler_error(fail_msg) %}\n\n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n\n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {% endif %}\n\n {% endif %}\n\n {{ 
return(schema_changes_dict['source_columns']) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.334361, "supported_languages": null}, "macro.dbt.can_clone_table": {"name": "can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.can_clone_table", "macro_sql": "{% macro can_clone_table() %}\n {{ return(adapter.dispatch('can_clone_table', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__can_clone_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.334629, "supported_languages": null}, "macro.dbt.default__can_clone_table": {"name": "default__can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.default__can_clone_table", "macro_sql": "{% macro default__can_clone_table() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.334752, "supported_languages": null}, "macro.dbt.create_or_replace_clone": {"name": "create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.create_or_replace_clone", "macro_sql": "{% macro create_or_replace_clone(this_relation, defer_relation) %}\n {{ return(adapter.dispatch('create_or_replace_clone', 'dbt')(this_relation, defer_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_or_replace_clone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3350348, "supported_languages": null}, "macro.dbt.default__create_or_replace_clone": {"name": "default__create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.default__create_or_replace_clone", "macro_sql": "{% macro default__create_or_replace_clone(this_relation, defer_relation) %}\n create or replace table {{ this_relation }} clone {{ defer_relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.33516, "supported_languages": null}, "macro.dbt.materialization_clone_default": {"name": "materialization_clone_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/clone.sql", "original_file_path": "macros/materializations/models/clone/clone.sql", "unique_id": "macro.dbt.materialization_clone_default", "macro_sql": "{%- materialization clone, default 
-%}\n\n {%- set relations = {'relations': []} -%}\n\n {%- if not defer_relation -%}\n -- nothing to do\n {{ log(\"No relation found in state manifest for \" ~ model.unique_id, info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n\n {%- if existing_relation and not flags.FULL_REFRESH -%}\n -- noop!\n {{ log(\"Relation \" ~ existing_relation ~ \" already exists\", info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set other_existing_relation = load_cached_relation(defer_relation) -%}\n\n -- If this is a database that can do zero-copy cloning of tables, and the other relation is a table, then this will be a table\n -- Otherwise, this will be a view\n\n {% set can_clone_table = can_clone_table() %}\n\n {%- if other_existing_relation and other_existing_relation.type == 'table' and can_clone_table -%}\n\n {%- set target_relation = this.incorporate(type='table') -%}\n {% if existing_relation is not none and not existing_relation.is_table %}\n {{ log(\"Dropping relation \" ~ existing_relation ~ \" because it is of type \" ~ existing_relation.type) }}\n {{ drop_relation_if_exists(existing_relation) }}\n {% endif %}\n\n -- as a general rule, data platforms that can clone tables can also do atomic 'create or replace'\n {% call statement('main') %}\n {% if target_relation and defer_relation and target_relation == defer_relation %}\n {{ log(\"Target relation and defer relation are the same, skipping clone for relation: \" ~ target_relation) }}\n {% else %}\n {{ create_or_replace_clone(target_relation, defer_relation) }}\n {% endif %}\n\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n {% do persist_docs(target_relation, model) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n {%- else -%}\n\n {%- set target_relation = this.incorporate(type='view') -%}\n\n -- reuse the view materialization\n -- TODO: support actual dispatch for materialization macros\n -- Tracking ticket: https://github.com/dbt-labs/dbt-core/issues/7799\n {% set search_name = \"materialization_view_\" ~ adapter.type() %}\n {% if not search_name in context %}\n {% set search_name = \"materialization_view_default\" %}\n {% endif %}\n {% set materialization_macro = context[search_name] %}\n {% set relations = materialization_macro() %}\n {{ return(relations) }}\n\n {%- endif -%}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.can_clone_table", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.create_or_replace_clone", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.33875, "supported_languages": ["sql"]}, "macro.dbt.materialization_seed_default": {"name": "materialization_seed_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "unique_id": "macro.dbt.materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set 
exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparison later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.get_csv_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.341901, "supported_languages": ["sql"]}, "macro.dbt.create_csv_table": {"name": "create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.346889, "supported_languages": null}, "macro.dbt.default__create_csv_table": {"name": "default__create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = 
model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.347757, "supported_languages": null}, "macro.dbt.reset_csv_table": {"name": "reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.348011, "supported_languages": null}, "macro.dbt.default__reset_csv_table": {"name": "default__reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.34848, "supported_languages": null}, "macro.dbt.get_csv_sql": {"name": "get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_csv_sql", "macro_sql": "{% macro get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ adapter.dispatch('get_csv_sql', 'dbt')(create_or_truncate_sql, insert_sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_csv_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.348659, "supported_languages": null}, "macro.dbt.default__get_csv_sql": {"name": "default__get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_csv_sql", "macro_sql": "{% macro default__get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ create_or_truncate_sql }};\n -- 
dbt seed --\n {{ insert_sql }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3488052, "supported_languages": null}, "macro.dbt.get_binding_char": {"name": "get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.348948, "supported_languages": null}, "macro.dbt.default__get_binding_char": {"name": "default__get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.34906, "supported_languages": null}, "macro.dbt.get_batch_size": {"name": "get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.349211, "supported_languages": null}, "macro.dbt.default__get_batch_size": {"name": "default__get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.349329, "supported_languages": null}, "macro.dbt.get_seed_column_quoted_csv": {"name": "get_seed_column_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": 
[], "created_at": 1706724325.3498209, "supported_languages": null}, "macro.dbt.load_csv_rows": {"name": "load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.349995, "supported_languages": null}, "macro.dbt.default__load_csv_rows": {"name": "default__load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3512099, "supported_languages": null}, "macro.dbt.generate_alias_name": {"name": "generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3516212, "supported_languages": null}, "macro.dbt.default__generate_alias_name": {"name": "default__generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name -%}\n\n {{ custom_alias_name | trim }}\n\n {%- elif node.version -%}\n\n {{ 
return(node.name ~ \"_v\" ~ (node.version | replace(\".\", \"_\"))) }}\n\n {%- else -%}\n\n {{ node.name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.351978, "supported_languages": null}, "macro.dbt.generate_schema_name": {"name": "generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.352462, "supported_languages": null}, "macro.dbt.default__generate_schema_name": {"name": "default__generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.352705, "supported_languages": null}, "macro.dbt.generate_schema_name_for_env": {"name": "generate_schema_name_for_env", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.352967, "supported_languages": null}, "macro.dbt.generate_database_name": {"name": "generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3533351, "supported_languages": null}, "macro.dbt.default__generate_database_name": {"name": 
"default__generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.353569, "supported_languages": null}, "macro.dbt.get_drop_sql": {"name": "get_drop_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.get_drop_sql", "macro_sql": "{%- macro get_drop_sql(relation) -%}\n {{- log('Applying DROP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_drop_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.354291, "supported_languages": null}, "macro.dbt.default__get_drop_sql": {"name": "default__get_drop_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.default__get_drop_sql", "macro_sql": "{%- macro default__get_drop_sql(relation) -%}\n\n {%- if relation.is_view -%}\n {{ drop_view(relation) }}\n\n {%- elif relation.is_table -%}\n {{ drop_table(relation) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ drop_materialized_view(relation) }}\n\n {%- else -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n\n {%- endif -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.drop_view", "macro.dbt.drop_table", "macro.dbt.drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.354657, "supported_languages": null}, "macro.dbt.drop_relation": {"name": "drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.354845, "supported_languages": null}, "macro.dbt.default__drop_relation": {"name": "default__drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n {{ get_drop_sql(relation) }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, 
"node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.355039, "supported_languages": null}, "macro.dbt.drop_relation_if_exists": {"name": "drop_relation_if_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.355237, "supported_languages": null}, "macro.dbt.get_replace_sql": {"name": "get_replace_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/replace.sql", "original_file_path": "macros/relations/replace.sql", "unique_id": "macro.dbt.get_replace_sql", "macro_sql": "{% macro get_replace_sql(existing_relation, target_relation, sql) %}\n {{- log('Applying REPLACE to: ' ~ existing_relation) -}}\n {{- adapter.dispatch('get_replace_sql', 'dbt')(existing_relation, target_relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_replace_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.355992, "supported_languages": null}, "macro.dbt.default__get_replace_sql": {"name": "default__get_replace_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/replace.sql", "original_file_path": "macros/relations/replace.sql", "unique_id": "macro.dbt.default__get_replace_sql", "macro_sql": "{% macro default__get_replace_sql(existing_relation, target_relation, sql) %}\n\n {# /* use a create or replace statement if possible */ #}\n\n {% set is_replaceable = existing_relation.type == target_relation_type and existing_relation.can_be_replaced %}\n\n {% if is_replaceable and existing_relation.is_view %}\n {{ get_replace_view_sql(target_relation, sql) }}\n\n {% elif is_replaceable and existing_relation.is_table %}\n {{ get_replace_table_sql(target_relation, sql) }}\n\n {% elif is_replaceable and existing_relation.is_materialized_view %}\n {{ get_replace_materialized_view_sql(target_relation, sql) }}\n\n {# /* a create or replace statement is not possible, so try to stage and/or backup to be safe */ #}\n\n {# /* create target_relation as an intermediate relation, then swap it out with the existing one using a backup */ #}\n {%- elif target_relation.can_be_renamed and existing_relation.can_be_renamed -%}\n {{ get_create_intermediate_sql(target_relation, sql) }};\n {{ get_create_backup_sql(existing_relation) }};\n {{ get_rename_intermediate_sql(target_relation) }};\n {{ get_drop_backup_sql(existing_relation) }}\n\n {# /* create target_relation as an intermediate relation, then swap it out with the existing one without using a backup */ #}\n {%- elif target_relation.can_be_renamed -%}\n {{ get_create_intermediate_sql(target_relation, sql) }};\n {{ get_drop_sql(existing_relation) }};\n {{ get_rename_intermediate_sql(target_relation) }}\n\n {# /* create target_relation in place by first backing up the existing relation */ #}\n {%- elif existing_relation.can_be_renamed -%}\n {{ get_create_backup_sql(existing_relation) }};\n {{ get_create_sql(target_relation, sql) }};\n {{ get_drop_backup_sql(existing_relation) }}\n\n {# /* no renaming is allowed, so 
just drop and create */ #}\n {%- else -%}\n {{ get_drop_sql(existing_relation) }};\n {{ get_create_sql(target_relation, sql) }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_replace_view_sql", "macro.dbt.get_replace_table_sql", "macro.dbt.get_replace_materialized_view_sql", "macro.dbt.get_create_intermediate_sql", "macro.dbt.get_create_backup_sql", "macro.dbt.get_rename_intermediate_sql", "macro.dbt.get_drop_backup_sql", "macro.dbt.get_drop_sql", "macro.dbt.get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3573382, "supported_languages": null}, "macro.dbt.get_create_intermediate_sql": {"name": "get_create_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_intermediate.sql", "original_file_path": "macros/relations/create_intermediate.sql", "unique_id": "macro.dbt.get_create_intermediate_sql", "macro_sql": "{%- macro get_create_intermediate_sql(relation, sql) -%}\n {{- log('Applying CREATE INTERMEDIATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_intermediate_sql', 'dbt')(relation, sql) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_intermediate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.357704, "supported_languages": null}, "macro.dbt.default__get_create_intermediate_sql": {"name": "default__get_create_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_intermediate.sql", "original_file_path": "macros/relations/create_intermediate.sql", "unique_id": "macro.dbt.default__get_create_intermediate_sql", "macro_sql": "{%- macro default__get_create_intermediate_sql(relation, sql) -%}\n\n -- get the standard intermediate name\n {% set intermediate_relation = make_intermediate_relation(relation) %}\n\n -- drop any pre-existing intermediate\n {{ get_drop_sql(intermediate_relation) }};\n\n {{ get_create_sql(intermediate_relation, sql) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_intermediate_relation", "macro.dbt.get_drop_sql", "macro.dbt.get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3579578, "supported_languages": null}, "macro.dbt.get_drop_backup_sql": {"name": "get_drop_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop_backup.sql", "original_file_path": "macros/relations/drop_backup.sql", "unique_id": "macro.dbt.get_drop_backup_sql", "macro_sql": "{%- macro get_drop_backup_sql(relation) -%}\n {{- log('Applying DROP BACKUP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_drop_backup_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_drop_backup_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.35826, "supported_languages": null}, "macro.dbt.default__get_drop_backup_sql": {"name": "default__get_drop_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop_backup.sql", "original_file_path": "macros/relations/drop_backup.sql", "unique_id": "macro.dbt.default__get_drop_backup_sql", "macro_sql": "{%- macro default__get_drop_backup_sql(relation) -%}\n\n -- get the 
standard backup name\n {% set backup_relation = make_backup_relation(relation, relation.type) %}\n\n {{ get_drop_sql(backup_relation) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_backup_relation", "macro.dbt.get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.358452, "supported_languages": null}, "macro.dbt.get_rename_sql": {"name": "get_rename_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.get_rename_sql", "macro_sql": "{%- macro get_rename_sql(relation, new_name) -%}\n {{- log('Applying RENAME to: ' ~ relation) -}}\n {{- adapter.dispatch('get_rename_sql', 'dbt')(relation, new_name) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.359222, "supported_languages": null}, "macro.dbt.default__get_rename_sql": {"name": "default__get_rename_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.default__get_rename_sql", "macro_sql": "{%- macro default__get_rename_sql(relation, new_name) -%}\n\n {%- if relation.is_view -%}\n {{ get_rename_view_sql(relation, new_name) }}\n\n {%- elif relation.is_table -%}\n {{ get_rename_table_sql(relation, new_name) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ get_rename_materialized_view_sql(relation, new_name) }}\n\n {%- else -%}\n {{- exceptions.raise_compiler_error(\"`get_rename_sql` has not been implemented for: \" ~ relation.type ) -}}\n\n {%- endif -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.get_rename_view_sql", "macro.dbt.get_rename_table_sql", "macro.dbt.get_rename_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.359672, "supported_languages": null}, "macro.dbt.rename_relation": {"name": "rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.359879, "supported_languages": null}, "macro.dbt.default__rename_relation": {"name": "default__rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, 
"node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.360169, "supported_languages": null}, "macro.dbt.get_create_backup_sql": {"name": "get_create_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_backup.sql", "original_file_path": "macros/relations/create_backup.sql", "unique_id": "macro.dbt.get_create_backup_sql", "macro_sql": "{%- macro get_create_backup_sql(relation) -%}\n {{- log('Applying CREATE BACKUP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_backup_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_backup_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.360509, "supported_languages": null}, "macro.dbt.default__get_create_backup_sql": {"name": "default__get_create_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_backup.sql", "original_file_path": "macros/relations/create_backup.sql", "unique_id": "macro.dbt.default__get_create_backup_sql", "macro_sql": "{%- macro default__get_create_backup_sql(relation) -%}\n\n -- get the standard backup name\n {% set backup_relation = make_backup_relation(relation, relation.type) %}\n\n -- drop any pre-existing backup\n {{ get_drop_sql(backup_relation) }};\n\n {{ get_rename_sql(relation, backup_relation.identifier) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_backup_relation", "macro.dbt.get_drop_sql", "macro.dbt.get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.360769, "supported_languages": null}, "macro.dbt.get_create_sql": {"name": "get_create_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create.sql", "original_file_path": "macros/relations/create.sql", "unique_id": "macro.dbt.get_create_sql", "macro_sql": "{%- macro get_create_sql(relation, sql) -%}\n {{- log('Applying CREATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_sql', 'dbt')(relation, sql) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.361225, "supported_languages": null}, "macro.dbt.default__get_create_sql": {"name": "default__get_create_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create.sql", "original_file_path": "macros/relations/create.sql", "unique_id": "macro.dbt.default__get_create_sql", "macro_sql": "{%- macro default__get_create_sql(relation, sql) -%}\n\n {%- if relation.is_view -%}\n {{ get_create_view_as_sql(relation, sql) }}\n\n {%- elif relation.is_table -%}\n {{ get_create_table_as_sql(False, relation, sql) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ get_create_materialized_view_as_sql(relation, sql) }}\n\n {%- else -%}\n {{- exceptions.raise_compiler_error(\"`get_create_sql` has not been implemented for: \" ~ relation.type ) -}}\n\n {%- endif -%}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.get_create_view_as_sql", "macro.dbt.get_create_table_as_sql", "macro.dbt.get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3616471, "supported_languages": 
null}, "macro.dbt.get_rename_intermediate_sql": {"name": "get_rename_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename_intermediate.sql", "original_file_path": "macros/relations/rename_intermediate.sql", "unique_id": "macro.dbt.get_rename_intermediate_sql", "macro_sql": "{%- macro get_rename_intermediate_sql(relation) -%}\n {{- log('Applying RENAME INTERMEDIATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_rename_intermediate_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_rename_intermediate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3619862, "supported_languages": null}, "macro.dbt.default__get_rename_intermediate_sql": {"name": "default__get_rename_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename_intermediate.sql", "original_file_path": "macros/relations/rename_intermediate.sql", "unique_id": "macro.dbt.default__get_rename_intermediate_sql", "macro_sql": "{%- macro default__get_rename_intermediate_sql(relation) -%}\n\n -- get the standard intermediate name\n {% set intermediate_relation = make_intermediate_relation(relation) %}\n\n {{ get_rename_sql(intermediate_relation, relation.identifier) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_intermediate_relation", "macro.dbt.get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3621829, "supported_languages": null}, "macro.dbt.drop_materialized_view": {"name": "drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt.drop_materialized_view", "macro_sql": "{% macro drop_materialized_view(relation) -%}\n {{ return(adapter.dispatch('drop_materialized_view', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.362443, "supported_languages": null}, "macro.dbt.default__drop_materialized_view": {"name": "default__drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt.default__drop_materialized_view", "macro_sql": "{% macro default__drop_materialized_view(relation) -%}\n drop materialized view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.36256, "supported_languages": null}, "macro.dbt.get_replace_materialized_view_sql": {"name": "get_replace_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/replace.sql", "original_file_path": "macros/relations/materialized_view/replace.sql", "unique_id": "macro.dbt.get_replace_materialized_view_sql", "macro_sql": "{% macro get_replace_materialized_view_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_materialized_view_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", 
"depends_on": {"macros": ["macro.dbt.default__get_replace_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.362815, "supported_languages": null}, "macro.dbt.default__get_replace_materialized_view_sql": {"name": "default__get_replace_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/replace.sql", "original_file_path": "macros/relations/materialized_view/replace.sql", "unique_id": "macro.dbt.default__get_replace_materialized_view_sql", "macro_sql": "{% macro default__get_replace_materialized_view_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_materialized_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.362957, "supported_languages": null}, "macro.dbt.refresh_materialized_view": {"name": "refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt.refresh_materialized_view", "macro_sql": "{% macro refresh_materialized_view(relation) %}\n {{- log('Applying REFRESH to: ' ~ relation) -}}\n {{- adapter.dispatch('refresh_materialized_view', 'dbt')(relation) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__refresh_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.363263, "supported_languages": null}, "macro.dbt.default__refresh_materialized_view": {"name": "default__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt.default__refresh_materialized_view", "macro_sql": "{% macro default__refresh_materialized_view(relation) %}\n {{ exceptions.raise_compiler_error(\"`refresh_materialized_view` has not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.363452, "supported_languages": null}, "macro.dbt.get_rename_materialized_view_sql": {"name": "get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt.get_rename_materialized_view_sql", "macro_sql": "{% macro get_rename_materialized_view_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_materialized_view_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_rename_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.363706, "supported_languages": null}, "macro.dbt.default__get_rename_materialized_view_sql": {"name": "default__get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/rename.sql", 
"original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt.default__get_rename_materialized_view_sql", "macro_sql": "{% macro default__get_rename_materialized_view_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_materialized_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.363841, "supported_languages": null}, "macro.dbt.get_alter_materialized_view_as_sql": {"name": "get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.get_alter_materialized_view_as_sql", "macro_sql": "{% macro get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{- log('Applying ALTER to: ' ~ relation) -}}\n {{- adapter.dispatch('get_alter_materialized_view_as_sql', 'dbt')(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n ) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.364435, "supported_languages": null}, "macro.dbt.default__get_alter_materialized_view_as_sql": {"name": "default__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.default__get_alter_materialized_view_as_sql", "macro_sql": "{% macro default__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.36461, "supported_languages": null}, "macro.dbt.get_materialized_view_configuration_changes": {"name": "get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.get_materialized_view_configuration_changes", "macro_sql": "{% macro get_materialized_view_configuration_changes(existing_relation, new_config) %}\n /* {#\n It's recommended that configuration changes be formatted as follows:\n {\"\": [{\"action\": \"\", \"context\": ...}]}\n\n For example:\n {\n \"indexes\": [\n {\"action\": \"drop\", \"context\": \"index_abc\"},\n {\"action\": \"create\", \"context\": {\"columns\": [\"column_1\", \"column_2\"], \"type\": \"hash\", \"unique\": True}},\n ],\n }\n\n Either way, `get_materialized_view_configuration_changes` needs to align with `get_alter_materialized_view_as_sql`.\n #} */\n {{- log('Determining configuration changes on: ' ~ existing_relation) -}}\n {%- do 
return(adapter.dispatch('get_materialized_view_configuration_changes', 'dbt')(existing_relation, new_config)) -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_materialized_view_configuration_changes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3648782, "supported_languages": null}, "macro.dbt.default__get_materialized_view_configuration_changes": {"name": "default__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.default__get_materialized_view_configuration_changes", "macro_sql": "{% macro default__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.365012, "supported_languages": null}, "macro.dbt.get_create_materialized_view_as_sql": {"name": "get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt.get_create_materialized_view_as_sql", "macro_sql": "{% macro get_create_materialized_view_as_sql(relation, sql) -%}\n {{- adapter.dispatch('get_create_materialized_view_as_sql', 'dbt')(relation, sql) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.365271, "supported_languages": null}, "macro.dbt.default__get_create_materialized_view_as_sql": {"name": "default__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt.default__get_create_materialized_view_as_sql", "macro_sql": "{% macro default__get_create_materialized_view_as_sql(relation, sql) -%}\n {{ exceptions.raise_compiler_error(\n \"`get_create_materialized_view_as_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.365404, "supported_languages": null}, "macro.dbt.get_table_columns_and_constraints": {"name": "get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_table_columns_and_constraints", "macro_sql": "{%- macro get_table_columns_and_constraints() -%}\n {{ adapter.dispatch('get_table_columns_and_constraints', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3663511, "supported_languages": 
null}, "macro.dbt.default__get_table_columns_and_constraints": {"name": "default__get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_table_columns_and_constraints", "macro_sql": "{% macro default__get_table_columns_and_constraints() -%}\n {{ return(table_columns_and_constraints()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.366464, "supported_languages": null}, "macro.dbt.table_columns_and_constraints": {"name": "table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.table_columns_and_constraints", "macro_sql": "{% macro table_columns_and_constraints() %}\n {# loop through user_provided_columns to create DDL with data types and constraints #}\n {%- set raw_column_constraints = adapter.render_raw_columns_constraints(raw_columns=model['columns']) -%}\n {%- set raw_model_constraints = adapter.render_raw_model_constraints(raw_constraints=model['constraints']) -%}\n (\n {% for c in raw_column_constraints -%}\n {{ c }}{{ \",\" if not loop.last or raw_model_constraints }}\n {% endfor %}\n {% for c in raw_model_constraints -%}\n {{ c }}{{ \",\" if not loop.last }}\n {% endfor -%}\n )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.366965, "supported_languages": null}, "macro.dbt.get_assert_columns_equivalent": {"name": "get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_assert_columns_equivalent", "macro_sql": "\n\n{%- macro get_assert_columns_equivalent(sql) -%}\n {{ adapter.dispatch('get_assert_columns_equivalent', 'dbt')(sql) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.367124, "supported_languages": null}, "macro.dbt.default__get_assert_columns_equivalent": {"name": "default__get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_assert_columns_equivalent", "macro_sql": "{% macro default__get_assert_columns_equivalent(sql) -%}\n {{ return(assert_columns_equivalent(sql)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.367249, "supported_languages": null}, "macro.dbt.assert_columns_equivalent": {"name": "assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": 
"macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.assert_columns_equivalent", "macro_sql": "{% macro assert_columns_equivalent(sql) %}\n\n {#-- First ensure the user has defined 'columns' in yaml specification --#}\n {%- set user_defined_columns = model['columns'] -%}\n {%- if not user_defined_columns -%}\n {{ exceptions.raise_contract_error([], []) }}\n {%- endif -%}\n\n {#-- Obtain the column schema provided by sql file. #}\n {%- set sql_file_provided_columns = get_column_schema_from_query(sql, config.get('sql_header', none)) -%}\n {#--Obtain the column schema provided by the schema file by generating an 'empty schema' query from the model's columns. #}\n {%- set schema_file_provided_columns = get_column_schema_from_query(get_empty_schema_sql(user_defined_columns)) -%}\n\n {#-- create dictionaries with name and formatted data type and strings for exception #}\n {%- set sql_columns = format_columns(sql_file_provided_columns) -%}\n {%- set yaml_columns = format_columns(schema_file_provided_columns) -%}\n\n {%- if sql_columns|length != yaml_columns|length -%}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n\n {%- for sql_col in sql_columns -%}\n {%- set yaml_col = [] -%}\n {%- for this_col in yaml_columns -%}\n {%- if this_col['name'] == sql_col['name'] -%}\n {%- do yaml_col.append(this_col) -%}\n {%- break -%}\n {%- endif -%}\n {%- endfor -%}\n {%- if not yaml_col -%}\n {#-- Column with name not found in yaml #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- if sql_col['formatted'] != yaml_col[0]['formatted'] -%}\n {#-- Column data types don't match #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_column_schema_from_query", "macro.dbt.get_empty_schema_sql", "macro.dbt.format_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.368504, "supported_languages": null}, "macro.dbt.format_columns": {"name": "format_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.format_columns", "macro_sql": "{% macro format_columns(columns) %}\n {% set formatted_columns = [] %}\n {% for column in columns %}\n {%- set formatted_column = adapter.dispatch('format_column', 'dbt')(column) -%}\n {%- do formatted_columns.append(formatted_column) -%}\n {% endfor %}\n {{ return(formatted_columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__format_column"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.368866, "supported_languages": null}, "macro.dbt.default__format_column": {"name": "default__format_column", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__format_column", "macro_sql": "{% macro default__format_column(column) -%}\n {% set data_type = column.dtype %}\n {% set formatted = column.column.lower() ~ \" \" ~ data_type %}\n {{ return({'name': column.name, 'data_type': data_type, 'formatted': formatted}) }}\n{%- endmacro -%}", "depends_on": {"macros": []}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.369202, "supported_languages": null}, "macro.dbt.drop_table": {"name": "drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt.drop_table", "macro_sql": "{% macro drop_table(relation) -%}\n {{ return(adapter.dispatch('drop_table', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3694642, "supported_languages": null}, "macro.dbt.default__drop_table": {"name": "default__drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt.default__drop_table", "macro_sql": "{% macro default__drop_table(relation) -%}\n drop table if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3695629, "supported_languages": null}, "macro.dbt.get_replace_table_sql": {"name": "get_replace_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt.get_replace_table_sql", "macro_sql": "{% macro get_replace_table_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_table_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_replace_table_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.369812, "supported_languages": null}, "macro.dbt.default__get_replace_table_sql": {"name": "default__get_replace_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt.default__get_replace_table_sql", "macro_sql": "{% macro default__get_replace_table_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_table_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.369948, "supported_languages": null}, "macro.dbt.get_rename_table_sql": {"name": "get_rename_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt.get_rename_table_sql", "macro_sql": "{% macro get_rename_table_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_table_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_rename_table_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.370205, "supported_languages": null}, "macro.dbt.default__get_rename_table_sql": {"name": "default__get_rename_table_sql", "resource_type": "macro", "package_name": 
"dbt", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt.default__get_rename_table_sql", "macro_sql": "{% macro default__get_rename_table_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_table_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.370343, "supported_languages": null}, "macro.dbt.get_create_table_as_sql": {"name": "get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.get_create_table_as_sql", "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_table_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.371142, "supported_languages": null}, "macro.dbt.default__get_create_table_as_sql": {"name": "default__get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_create_table_as_sql", "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.371315, "supported_languages": null}, "macro.dbt.create_table_as": {"name": "create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, compiled_code, language='sql') -%}\n {# backward compatibility for create_table_as that does not support language #}\n {% if language == \"sql\" %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code)}}\n {% else %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code, language) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.371722, "supported_languages": null}, "macro.dbt.default__create_table_as": {"name": "default__create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n {% set contract_config = 
config.get('contract') %}\n {% if contract_config.enforced and (not temporary) %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }}\n {%- set sql = get_select_subquery(sql) %}\n {% endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3723981, "supported_languages": null}, "macro.dbt.default__get_column_names": {"name": "default__get_column_names", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_column_names", "macro_sql": "{% macro default__get_column_names() %}\n {#- loop through user_provided_columns to get column names -#}\n {%- set user_provided_columns = model['columns'] -%}\n {%- for i in user_provided_columns %}\n {%- set col = user_provided_columns[i] -%}\n {%- set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] -%}\n {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.372839, "supported_languages": null}, "macro.dbt.get_select_subquery": {"name": "get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.get_select_subquery", "macro_sql": "{% macro get_select_subquery(sql) %}\n {{ return(adapter.dispatch('get_select_subquery', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3730109, "supported_languages": null}, "macro.dbt.default__get_select_subquery": {"name": "default__get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_select_subquery", "macro_sql": "{% macro default__get_select_subquery(sql) %}\n select {{ adapter.dispatch('get_column_names', 'dbt')() }}\n from (\n {{ sql }}\n ) as model_subq\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.get_column_names", "macro.dbt.default__get_column_names"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.373178, "supported_languages": null}, "macro.dbt.drop_view": {"name": "drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt.drop_view", "macro_sql": "{% macro drop_view(relation) -%}\n {{ return(adapter.dispatch('drop_view', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.373425, "supported_languages": null}, "macro.dbt.default__drop_view": {"name": 
"default__drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt.default__drop_view", "macro_sql": "{% macro default__drop_view(relation) -%}\n drop view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3735301, "supported_languages": null}, "macro.dbt.get_replace_view_sql": {"name": "get_replace_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.get_replace_view_sql", "macro_sql": "{% macro get_replace_view_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_view_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_replace_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.374401, "supported_languages": null}, "macro.dbt.default__get_replace_view_sql": {"name": "default__get_replace_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.default__get_replace_view_sql", "macro_sql": "{% macro default__get_replace_view_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.374536, "supported_languages": null}, "macro.dbt.create_or_replace_view": {"name": "create_or_replace_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. 
This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.37571, "supported_languages": null}, "macro.dbt.handle_existing_table": {"name": "handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.375901, "supported_languages": null}, "macro.dbt.default__handle_existing_table": {"name": "default__handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3761091, "supported_languages": null}, "macro.dbt.get_rename_view_sql": {"name": "get_rename_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt.get_rename_view_sql", "macro_sql": "{% macro get_rename_view_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_view_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_rename_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3763762, "supported_languages": null}, "macro.dbt.default__get_rename_view_sql": {"name": "default__get_rename_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt.default__get_rename_view_sql", "macro_sql": "{% macro default__get_rename_view_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n 
\"`get_rename_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3765209, "supported_languages": null}, "macro.dbt.get_create_view_as_sql": {"name": "get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.376925, "supported_languages": null}, "macro.dbt.default__get_create_view_as_sql": {"name": "default__get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.377078, "supported_languages": null}, "macro.dbt.create_view_as": {"name": "create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.37725, "supported_languages": null}, "macro.dbt.default__create_view_as": {"name": "default__create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.377648, "supported_languages": null}, "macro.dbt.default__test_relationships": {"name": "default__test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "unique_id": "macro.dbt.default__test_relationships", "macro_sql": "{% macro default__test_relationships(model, 
column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.378002, "supported_languages": null}, "macro.dbt.default__test_not_null": {"name": "default__test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "unique_id": "macro.dbt.default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else column_name %}\n\nselect {{ column_list }}\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.378277, "supported_languages": null}, "macro.dbt.default__test_unique": {"name": "default__test_unique", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/unique.sql", "original_file_path": "macros/generic_test_sql/unique.sql", "unique_id": "macro.dbt.default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.378516, "supported_languages": null}, "macro.dbt.default__test_accepted_values": {"name": "default__test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "unique_id": "macro.dbt.default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3790772, "supported_languages": null}, "macro.dbt.statement": {"name": "statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.statement", "macro_sql": "\n{%- macro statement(name=None, fetch_result=False, auto_begin=True, language='sql') -%}\n {%- if execute: -%}\n {%- set compiled_code = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime {} for node \"{}\"'.format(language, 
model['unique_id'])) }}\n {{ write(compiled_code) }}\n {%- endif -%}\n {%- if language == 'sql'-%}\n {%- set res, table = adapter.execute(compiled_code, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- elif language == 'python' -%}\n {%- set res = submit_python_job(model, compiled_code) -%}\n {#-- TODO: What should table be for python models? --#}\n {%- set table = None -%}\n {%- else -%}\n {% do exceptions.raise_compiler_error(\"statement macro didn't get supported language\") %}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3804932, "supported_languages": null}, "macro.dbt.noop_statement": {"name": "noop_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.381058, "supported_languages": null}, "macro.dbt.run_query": {"name": "run_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.381345, "supported_languages": null}, "macro.dbt.convert_datetime": {"name": "convert_datetime", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3831549, "supported_languages": null}, "macro.dbt.dates_in_range": {"name": "dates_in_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", 
out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.384467, "supported_languages": null}, "macro.dbt.partition_range": {"name": "partition_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. 
Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.385221, "supported_languages": null}, "macro.dbt.py_current_timestring": {"name": "py_current_timestring", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.385448, "supported_languages": null}, "macro.dbt.except": {"name": "except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.except", "macro_sql": "{% macro except() %}\n {{ return(adapter.dispatch('except', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.385685, "supported_languages": null}, "macro.dbt.default__except": {"name": "default__except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.default__except", "macro_sql": "{% macro default__except() %}\n\n except\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.38577, "supported_languages": null}, "macro.dbt.get_intervals_between": {"name": "get_intervals_between", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.get_intervals_between", "macro_sql": "{% macro get_intervals_between(start_date, end_date, datepart) -%}\n {{ return(adapter.dispatch('get_intervals_between', 'dbt')(start_date, end_date, datepart)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_intervals_between"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.386504, "supported_languages": null}, "macro.dbt.default__get_intervals_between": {"name": "default__get_intervals_between", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.default__get_intervals_between", "macro_sql": "{% macro default__get_intervals_between(start_date, end_date, datepart) -%}\n {%- call statement('get_intervals_between', fetch_result=True) %}\n\n select {{ dbt.datediff(start_date, end_date, datepart) }}\n\n {%- endcall -%}\n\n {%- set value_list = load_result('get_intervals_between') -%}\n\n {%- if value_list and value_list['data'] -%}\n {%- set values = value_list['data'] | map(attribute=0) | list %}\n {{ return(values[0]) }}\n {%- else -%}\n {{ return(1) }}\n {%- endif -%}\n\n{%- endmacro %}", 
"depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.387072, "supported_languages": null}, "macro.dbt.date_spine": {"name": "date_spine", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.date_spine", "macro_sql": "{% macro date_spine(datepart, start_date, end_date) %}\n {{ return(adapter.dispatch('date_spine', 'dbt')(datepart, start_date, end_date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3873, "supported_languages": null}, "macro.dbt.default__date_spine": {"name": "default__date_spine", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.default__date_spine", "macro_sql": "{% macro default__date_spine(datepart, start_date, end_date) %}\n\n\n {# call as follows:\n\n date_spine(\n \"day\",\n \"to_date('01/01/2016', 'mm/dd/yyyy')\",\n \"dbt.dateadd(week, 1, current_date)\"\n ) #}\n\n\n with rawdata as (\n\n {{dbt.generate_series(\n dbt.get_intervals_between(start_date, end_date, datepart)\n )}}\n\n ),\n\n all_periods as (\n\n select (\n {{\n dbt.dateadd(\n datepart,\n \"row_number() over (order by 1) - 1\",\n start_date\n )\n }}\n ) as date_{{datepart}}\n from rawdata\n\n ),\n\n filtered as (\n\n select *\n from all_periods\n where date_{{datepart}} <= {{ end_date }}\n\n )\n\n select * from filtered\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.generate_series", "macro.dbt.get_intervals_between", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3876731, "supported_languages": null}, "macro.dbt.replace": {"name": "replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.replace", "macro_sql": "{% macro replace(field, old_chars, new_chars) -%}\n {{ return(adapter.dispatch('replace', 'dbt') (field, old_chars, new_chars)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__replace"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.38798, "supported_languages": null}, "macro.dbt.default__replace": {"name": "default__replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.default__replace", "macro_sql": "{% macro default__replace(field, old_chars, new_chars) %}\n\n replace(\n {{ field }},\n {{ old_chars }},\n {{ new_chars }}\n )\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.388132, "supported_languages": null}, "macro.dbt.concat": {"name": "concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.concat", "macro_sql": "{% macro concat(fields) -%}\n {{ 
return(adapter.dispatch('concat', 'dbt')(fields)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.388392, "supported_languages": null}, "macro.dbt.default__concat": {"name": "default__concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.default__concat", "macro_sql": "{% macro default__concat(fields) -%}\n {{ fields|join(' || ') }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3885179, "supported_languages": null}, "macro.dbt.get_powers_of_two": {"name": "get_powers_of_two", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.get_powers_of_two", "macro_sql": "{% macro get_powers_of_two(upper_bound) %}\n {{ return(adapter.dispatch('get_powers_of_two', 'dbt')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.389342, "supported_languages": null}, "macro.dbt.default__get_powers_of_two": {"name": "default__get_powers_of_two", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.default__get_powers_of_two", "macro_sql": "{% macro default__get_powers_of_two(upper_bound) %}\n\n {% if upper_bound <= 0 %}\n {{ exceptions.raise_compiler_error(\"upper bound must be positive\") }}\n {% endif %}\n\n {% for _ in range(1, 100) %}\n {% if upper_bound <= 2 ** loop.index %}{{ return(loop.index) }}{% endif %}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.389757, "supported_languages": null}, "macro.dbt.generate_series": {"name": "generate_series", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.generate_series", "macro_sql": "{% macro generate_series(upper_bound) %}\n {{ return(adapter.dispatch('generate_series', 'dbt')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_series"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3899379, "supported_languages": null}, "macro.dbt.default__generate_series": {"name": "default__generate_series", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.default__generate_series", "macro_sql": "{% macro default__generate_series(upper_bound) %}\n\n {% set n = dbt.get_powers_of_two(upper_bound) %}\n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n {% for i in range(n) %}\n p{{i}}.generated_number * power(2, {{i}})\n {% if not loop.last %} + {% endif %}\n {% endfor 
%}\n + 1\n as generated_number\n\n from\n\n {% for i in range(n) %}\n p as p{{i}}\n {% if not loop.last %} cross join {% endif %}\n {% endfor %}\n\n )\n\n select *\n from unioned\n where generated_number <= {{upper_bound}}\n order by generated_number\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3904488, "supported_languages": null}, "macro.dbt.length": {"name": "length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.length", "macro_sql": "{% macro length(expression) -%}\n {{ return(adapter.dispatch('length', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__length"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.390711, "supported_languages": null}, "macro.dbt.default__length": {"name": "default__length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.default__length", "macro_sql": "{% macro default__length(expression) %}\n\n length(\n {{ expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.390826, "supported_languages": null}, "macro.dbt.dateadd": {"name": "dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.dateadd", "macro_sql": "{% macro dateadd(datepart, interval, from_date_or_timestamp) %}\n {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.391176, "supported_languages": null}, "macro.dbt.default__dateadd": {"name": "default__dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.default__dateadd", "macro_sql": "{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_date_or_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.39133, "supported_languages": null}, "macro.dbt.intersect": {"name": "intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.intersect", "macro_sql": "{% macro intersect() %}\n {{ return(adapter.dispatch('intersect', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__intersect"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3915389, "supported_languages": null}, "macro.dbt.default__intersect": {"name": "default__intersect", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.default__intersect", "macro_sql": "{% macro default__intersect() %}\n\n intersect\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.391679, "supported_languages": null}, "macro.dbt.escape_single_quotes": {"name": "escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.escape_single_quotes", "macro_sql": "{% macro escape_single_quotes(expression) %}\n {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__escape_single_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.39193, "supported_languages": null}, "macro.dbt.default__escape_single_quotes": {"name": "default__escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.default__escape_single_quotes", "macro_sql": "{% macro default__escape_single_quotes(expression) -%}\n{{ expression | replace(\"'\",\"''\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3920588, "supported_languages": null}, "macro.dbt.right": {"name": "right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.right", "macro_sql": "{% macro right(string_text, length_expression) -%}\n {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__right"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3923478, "supported_languages": null}, "macro.dbt.default__right": {"name": "default__right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.default__right", "macro_sql": "{% macro default__right(string_text, length_expression) %}\n\n right(\n {{ string_text }},\n {{ length_expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.392482, "supported_languages": null}, "macro.dbt.listagg": {"name": "listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.listagg", "macro_sql": "{% macro listagg(measure, delimiter_text=\"','\", order_by_clause=none, limit_num=none) -%}\n {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__listagg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": 
[], "created_at": 1706724325.3931258, "supported_languages": null}, "macro.dbt.default__listagg": {"name": "default__listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.default__listagg", "macro_sql": "{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n array_slice(\n array_agg(\n {{ measure }}\n ){% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n ,0\n ,{{ limit_num }}\n ),\n {{ delimiter_text }}\n )\n {%- else %}\n listagg(\n {{ measure }},\n {{ delimiter_text }}\n )\n {% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3935292, "supported_languages": null}, "macro.dbt.datediff": {"name": "datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.datediff", "macro_sql": "{% macro datediff(first_date, second_date, datepart) %}\n {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.393843, "supported_languages": null}, "macro.dbt.default__datediff": {"name": "default__datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.default__datediff", "macro_sql": "{% macro default__datediff(first_date, second_date, datepart) -%}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.394003, "supported_languages": null}, "macro.dbt.safe_cast": {"name": "safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.safe_cast", "macro_sql": "{% macro safe_cast(field, type) %}\n {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.394272, "supported_languages": null}, "macro.dbt.default__safe_cast": {"name": "default__safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.default__safe_cast", "macro_sql": "{% macro default__safe_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3944068, "supported_languages": null}, "macro.dbt.hash": {"name": "hash", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.hash", "macro_sql": "{% macro hash(field) -%}\n {{ return(adapter.dispatch('hash', 'dbt') (field)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__hash"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.394668, "supported_languages": null}, "macro.dbt.default__hash": {"name": "default__hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.default__hash", "macro_sql": "{% macro default__hash(field) -%}\n md5(cast({{ field }} as {{ api.Column.translate_type('string') }}))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.39483, "supported_languages": null}, "macro.dbt.cast_bool_to_text": {"name": "cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.cast_bool_to_text", "macro_sql": "{% macro cast_bool_to_text(field) %}\n {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.395061, "supported_languages": null}, "macro.dbt.default__cast_bool_to_text": {"name": "default__cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.default__cast_bool_to_text", "macro_sql": "{% macro default__cast_bool_to_text(field) %}\n cast({{ field }} as {{ api.Column.translate_type('string') }})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3952289, "supported_languages": null}, "macro.dbt.any_value": {"name": "any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.any_value", "macro_sql": "{% macro any_value(expression) -%}\n {{ return(adapter.dispatch('any_value', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__any_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3954592, "supported_languages": null}, "macro.dbt.default__any_value": {"name": "default__any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.default__any_value", "macro_sql": "{% macro default__any_value(expression) -%}\n\n any_value({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3955572, "supported_languages": null}, "macro.dbt.position": {"name": "position", "resource_type": 
"macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.position", "macro_sql": "{% macro position(substring_text, string_text) -%}\n {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__position"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.395829, "supported_languages": null}, "macro.dbt.default__position": {"name": "default__position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.default__position", "macro_sql": "{% macro default__position(substring_text, string_text) %}\n\n position(\n {{ substring_text }} in {{ string_text }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.395956, "supported_languages": null}, "macro.dbt.string_literal": {"name": "string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.string_literal", "macro_sql": "{%- macro string_literal(value) -%}\n {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.396199, "supported_languages": null}, "macro.dbt.default__string_literal": {"name": "default__string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.default__string_literal", "macro_sql": "{% macro default__string_literal(value) -%}\n '{{ value }}'\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.396354, "supported_languages": null}, "macro.dbt.type_string": {"name": "type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_string", "macro_sql": "\n\n{%- macro type_string() -%}\n {{ return(adapter.dispatch('type_string', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3972638, "supported_languages": null}, "macro.dbt.default__type_string": {"name": "default__type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_string", "macro_sql": "{% macro default__type_string() %}\n {{ return(api.Column.translate_type(\"string\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3974159, "supported_languages": null}, "macro.dbt.type_timestamp": {"name": "type_timestamp", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_timestamp", "macro_sql": "\n\n{%- macro type_timestamp() -%}\n {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3975801, "supported_languages": null}, "macro.dbt.default__type_timestamp": {"name": "default__type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_timestamp", "macro_sql": "{% macro default__type_timestamp() %}\n {{ return(api.Column.translate_type(\"timestamp\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.397724, "supported_languages": null}, "macro.dbt.type_float": {"name": "type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_float", "macro_sql": "\n\n{%- macro type_float() -%}\n {{ return(adapter.dispatch('type_float', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_float"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.397878, "supported_languages": null}, "macro.dbt.default__type_float": {"name": "default__type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_float", "macro_sql": "{% macro default__type_float() %}\n {{ return(api.Column.translate_type(\"float\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.398019, "supported_languages": null}, "macro.dbt.type_numeric": {"name": "type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_numeric", "macro_sql": "\n\n{%- macro type_numeric() -%}\n {{ return(adapter.dispatch('type_numeric', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.398171, "supported_languages": null}, "macro.dbt.default__type_numeric": {"name": "default__type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_numeric", "macro_sql": "{% macro default__type_numeric() %}\n {{ return(api.Column.numeric_type(\"numeric\", 28, 6)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.398336, "supported_languages": null}, "macro.dbt.type_bigint": {"name": "type_bigint", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_bigint", "macro_sql": "\n\n{%- macro type_bigint() -%}\n {{ return(adapter.dispatch('type_bigint', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.398498, "supported_languages": null}, "macro.dbt.default__type_bigint": {"name": "default__type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_bigint", "macro_sql": "{% macro default__type_bigint() %}\n {{ return(api.Column.translate_type(\"bigint\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.398651, "supported_languages": null}, "macro.dbt.type_int": {"name": "type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_int", "macro_sql": "\n\n{%- macro type_int() -%}\n {{ return(adapter.dispatch('type_int', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3988109, "supported_languages": null}, "macro.dbt.default__type_int": {"name": "default__type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_int", "macro_sql": "{%- macro default__type_int() -%}\n {{ return(api.Column.translate_type(\"integer\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3989499, "supported_languages": null}, "macro.dbt.type_boolean": {"name": "type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_boolean", "macro_sql": "\n\n{%- macro type_boolean() -%}\n {{ return(adapter.dispatch('type_boolean', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_boolean"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.399112, "supported_languages": null}, "macro.dbt.default__type_boolean": {"name": "default__type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_boolean", "macro_sql": "{%- macro default__type_boolean() -%}\n {{ return(api.Column.translate_type(\"boolean\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.399254, "supported_languages": null}, "macro.dbt.array_concat": {"name": "array_concat", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.array_concat", "macro_sql": "{% macro array_concat(array_1, array_2) -%}\n {{ return(adapter.dispatch('array_concat', 'dbt')(array_1, array_2)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.3995352, "supported_languages": null}, "macro.dbt.default__array_concat": {"name": "default__array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.default__array_concat", "macro_sql": "{% macro default__array_concat(array_1, array_2) -%}\n array_cat({{ array_1 }}, {{ array_2 }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.39966, "supported_languages": null}, "macro.dbt.bool_or": {"name": "bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.bool_or", "macro_sql": "{% macro bool_or(expression) -%}\n {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__bool_or"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.399913, "supported_languages": null}, "macro.dbt.default__bool_or": {"name": "default__bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.default__bool_or", "macro_sql": "{% macro default__bool_or(expression) -%}\n\n bool_or({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.400013, "supported_languages": null}, "macro.dbt.last_day": {"name": "last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.last_day", "macro_sql": "{% macro last_day(date, datepart) %}\n {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4003358, "supported_languages": null}, "macro.dbt.default_last_day": {"name": "default_last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default_last_day", "macro_sql": "\n\n{%- macro default_last_day(date, datepart) -%}\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd(datepart, '1', dbt.date_trunc(datepart, date))\n )}}\n as date)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4006479, "supported_languages": null}, 
"macro.dbt.default__last_day": {"name": "default__last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default__last_day", "macro_sql": "{% macro default__last_day(date, datepart) -%}\n {{dbt.default_last_day(date, datepart)}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.400789, "supported_languages": null}, "macro.dbt.split_part": {"name": "split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.split_part", "macro_sql": "{% macro split_part(string_text, delimiter_text, part_number) %}\n {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.401296, "supported_languages": null}, "macro.dbt.default__split_part": {"name": "default__split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.default__split_part", "macro_sql": "{% macro default__split_part(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4014568, "supported_languages": null}, "macro.dbt._split_part_negative": {"name": "_split_part_negative", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt._split_part_negative", "macro_sql": "{% macro _split_part_negative(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n length({{ string_text }})\n - length(\n replace({{ string_text }}, {{ delimiter_text }}, '')\n ) + 2 + {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.401684, "supported_languages": null}, "macro.dbt.date_trunc": {"name": "date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.date_trunc", "macro_sql": "{% macro date_trunc(datepart, date) -%}\n {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.40197, "supported_languages": null}, "macro.dbt.default__date_trunc": {"name": "default__date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.default__date_trunc", "macro_sql": "{% macro 
default__date_trunc(datepart, date) -%}\n date_trunc('{{datepart}}', {{date}})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.402091, "supported_languages": null}, "macro.dbt.array_construct": {"name": "array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.array_construct", "macro_sql": "{% macro array_construct(inputs=[], data_type=api.Column.translate_type('integer')) -%}\n {{ return(adapter.dispatch('array_construct', 'dbt')(inputs, data_type)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_construct"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.402441, "supported_languages": null}, "macro.dbt.default__array_construct": {"name": "default__array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.default__array_construct", "macro_sql": "{% macro default__array_construct(inputs, data_type) -%}\n {% if inputs|length > 0 %}\n array[ {{ inputs|join(' , ') }} ]\n {% else %}\n array[]::{{data_type}}[]\n {% endif %}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.402674, "supported_languages": null}, "macro.dbt.array_append": {"name": "array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.array_append", "macro_sql": "{% macro array_append(array, new_element) -%}\n {{ return(adapter.dispatch('array_append', 'dbt')(array, new_element)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_append"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4029598, "supported_languages": null}, "macro.dbt.default__array_append": {"name": "default__array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.default__array_append", "macro_sql": "{% macro default__array_append(array, new_element) -%}\n array_append({{ array }}, {{ new_element }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4030888, "supported_languages": null}, "macro.dbt.create_schema": {"name": "create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.403466, "supported_languages": null}, 
"macro.dbt.default__create_schema": {"name": "default__create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.403645, "supported_languages": null}, "macro.dbt.drop_schema": {"name": "drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.403808, "supported_languages": null}, "macro.dbt.default__drop_schema": {"name": "default__drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.40399, "supported_languages": null}, "macro.dbt.current_timestamp": {"name": "current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp", "macro_sql": "{%- macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.404497, "supported_languages": null}, "macro.dbt.default__current_timestamp": {"name": "default__current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter ' + adapter.type()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.404661, "supported_languages": null}, "macro.dbt.snapshot_get_time": {"name": "snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.snapshot_get_time", "macro_sql": "\n\n{%- macro 
snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.404811, "supported_languages": null}, "macro.dbt.default__snapshot_get_time": {"name": "default__snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() %}\n {{ current_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.404914, "supported_languages": null}, "macro.dbt.current_timestamp_backcompat": {"name": "current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_backcompat", "macro_sql": "{% macro current_timestamp_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4050748, "supported_languages": null}, "macro.dbt.default__current_timestamp_backcompat": {"name": "default__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_backcompat", "macro_sql": "{% macro default__current_timestamp_backcompat() %}\n current_timestamp::timestamp\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.405149, "supported_languages": null}, "macro.dbt.current_timestamp_in_utc_backcompat": {"name": "current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_in_utc_backcompat", "macro_sql": "{% macro current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_in_utc_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__current_timestamp_in_utc_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.405377, "supported_languages": null}, "macro.dbt.default__current_timestamp_in_utc_backcompat": {"name": "default__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro default__current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": 
["macro.dbt.current_timestamp_backcompat", "macro.dbt.default__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4055412, "supported_languages": null}, "macro.dbt.get_create_index_sql": {"name": "get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.406538, "supported_languages": null}, "macro.dbt.default__get_create_index_sql": {"name": "default__get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.406691, "supported_languages": null}, "macro.dbt.create_indexes": {"name": "create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4068449, "supported_languages": null}, "macro.dbt.default__create_indexes": {"name": "default__create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4072719, "supported_languages": null}, "macro.dbt.get_drop_index_sql": {"name": "get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_drop_index_sql", "macro_sql": "{% macro get_drop_index_sql(relation, index_name) -%}\n {{ adapter.dispatch('get_drop_index_sql', 'dbt')(relation, index_name) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_drop_index_sql"]}, "description": "", "meta": {}, 
"docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4074728, "supported_languages": null}, "macro.dbt.default__get_drop_index_sql": {"name": "default__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_drop_index_sql", "macro_sql": "{% macro default__get_drop_index_sql(relation, index_name) -%}\n {{ exceptions.raise_compiler_error(\"`get_drop_index_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.407617, "supported_languages": null}, "macro.dbt.get_show_indexes_sql": {"name": "get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_show_indexes_sql", "macro_sql": "{% macro get_show_indexes_sql(relation) -%}\n {{ adapter.dispatch('get_show_indexes_sql', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4077818, "supported_languages": null}, "macro.dbt.default__get_show_indexes_sql": {"name": "default__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_show_indexes_sql", "macro_sql": "{% macro default__get_show_indexes_sql(relation) -%}\n {{ exceptions.raise_compiler_error(\"`get_show_indexes_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4079201, "supported_languages": null}, "macro.dbt.make_intermediate_relation": {"name": "make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_intermediate_relation", "macro_sql": "{% macro make_intermediate_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_intermediate_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_intermediate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4099782, "supported_languages": null}, "macro.dbt.default__make_intermediate_relation": {"name": "default__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_intermediate_relation", "macro_sql": "{% macro default__make_intermediate_relation(base_relation, suffix) %}\n {{ return(default__make_temp_relation(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1706724325.410145, "supported_languages": null}, "macro.dbt.make_temp_relation": {"name": "make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.410369, "supported_languages": null}, "macro.dbt.default__make_temp_relation": {"name": "default__make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {%- set temp_identifier = base_relation.identifier ~ suffix -%}\n {%- set temp_relation = base_relation.incorporate(\n path={\"identifier\": temp_identifier}) -%}\n\n {{ return(temp_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.410657, "supported_languages": null}, "macro.dbt.make_backup_relation": {"name": "make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_backup_relation", "macro_sql": "{% macro make_backup_relation(base_relation, backup_relation_type, suffix='__dbt_backup') %}\n {{ return(adapter.dispatch('make_backup_relation', 'dbt')(base_relation, backup_relation_type, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_backup_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.410888, "supported_languages": null}, "macro.dbt.default__make_backup_relation": {"name": "default__make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_backup_relation", "macro_sql": "{% macro default__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {%- set backup_identifier = base_relation.identifier ~ suffix -%}\n {%- set backup_relation = base_relation.incorporate(\n path={\"identifier\": backup_identifier},\n type=backup_relation_type\n ) -%}\n {{ return(backup_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.411212, "supported_languages": null}, "macro.dbt.truncate_relation": {"name": "truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.411399, "supported_languages": null}, "macro.dbt.default__truncate_relation": {"name": "default__truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4115698, "supported_languages": null}, "macro.dbt.get_or_create_relation": {"name": "get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.411826, "supported_languages": null}, "macro.dbt.default__get_or_create_relation": {"name": "default__get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4124212, "supported_languages": null}, "macro.dbt.load_cached_relation": {"name": "load_cached_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_cached_relation", "macro_sql": "{% macro load_cached_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.412653, "supported_languages": null}, "macro.dbt.load_relation": {"name": "load_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(load_cached_relation(relation)) }}\n{% 
endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.412787, "supported_languages": null}, "macro.dbt.collect_freshness": {"name": "collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.413215, "supported_languages": null}, "macro.dbt.default__collect_freshness": {"name": "default__collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4136178, "supported_languages": null}, "macro.dbt.validate_sql": {"name": "validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.validate_sql", "macro_sql": "{% macro validate_sql(sql) -%}\n {{ return(adapter.dispatch('validate_sql', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__validate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.413891, "supported_languages": null}, "macro.dbt.default__validate_sql": {"name": "default__validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.default__validate_sql", "macro_sql": "{% macro default__validate_sql(sql) -%}\n {% call statement('validate_sql') -%}\n explain {{ sql }}\n {% endcall %}\n {{ return(load_result('validate_sql')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.414112, "supported_languages": null}, "macro.dbt.copy_grants": {"name": "copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.copy_grants", "macro_sql": "{% macro copy_grants() %}\n {{ return(adapter.dispatch('copy_grants', 'dbt')()) }}\n{% endmacro %}", "depends_on": 
{"macros": ["macro.dbt.default__copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.41579, "supported_languages": null}, "macro.dbt.default__copy_grants": {"name": "default__copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__copy_grants", "macro_sql": "{% macro default__copy_grants() %}\n {{ return(True) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4159071, "supported_languages": null}, "macro.dbt.support_multiple_grantees_per_dcl_statement": {"name": "support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.support_multiple_grantees_per_dcl_statement", "macro_sql": "{% macro support_multiple_grantees_per_dcl_statement() %}\n {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.41607, "supported_languages": null}, "macro.dbt.default__support_multiple_grantees_per_dcl_statement": {"name": "default__support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__support_multiple_grantees_per_dcl_statement", "macro_sql": "\n\n{%- macro default__support_multiple_grantees_per_dcl_statement() -%}\n {{ return(True) }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4163651, "supported_languages": null}, "macro.dbt.should_revoke": {"name": "should_revoke", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.should_revoke", "macro_sql": "{% macro should_revoke(existing_relation, full_refresh_mode=True) %}\n\n {% if not existing_relation %}\n {#-- The table doesn't already exist, so no grants to copy over --#}\n {{ return(False) }}\n {% elif full_refresh_mode %}\n {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#}\n {{ return(copy_grants()) }}\n {% else %}\n {#-- The table is being merged/upserted/inserted -- grants will be carried over --#}\n {{ return(True) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.416749, "supported_languages": null}, "macro.dbt.get_show_grant_sql": {"name": "get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_show_grant_sql", "macro_sql": "{% macro 
get_show_grant_sql(relation) %}\n {{ return(adapter.dispatch(\"get_show_grant_sql\", \"dbt\")(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_show_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.416945, "supported_languages": null}, "macro.dbt.default__get_show_grant_sql": {"name": "default__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_show_grant_sql", "macro_sql": "{% macro default__get_show_grant_sql(relation) %}\n show grants on {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.417047, "supported_languages": null}, "macro.dbt.get_grant_sql": {"name": "get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_grant_sql", "macro_sql": "{% macro get_grant_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.417266, "supported_languages": null}, "macro.dbt.default__get_grant_sql": {"name": "default__get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_grant_sql", "macro_sql": "\n\n{%- macro default__get_grant_sql(relation, privilege, grantees) -%}\n grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.417452, "supported_languages": null}, "macro.dbt.get_revoke_sql": {"name": "get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_revoke_sql", "macro_sql": "{% macro get_revoke_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_revoke_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.417669, "supported_languages": null}, "macro.dbt.default__get_revoke_sql": {"name": "default__get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_revoke_sql", "macro_sql": "\n\n{%- macro default__get_revoke_sql(relation, privilege, grantees) -%}\n revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1706724325.417844, "supported_languages": null}, "macro.dbt.get_dcl_statement_list": {"name": "get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_dcl_statement_list", "macro_sql": "{% macro get_dcl_statement_list(relation, grant_config, get_dcl_macro) %}\n {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_dcl_statement_list"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.41807, "supported_languages": null}, "macro.dbt.default__get_dcl_statement_list": {"name": "default__get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_dcl_statement_list", "macro_sql": "\n\n{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%}\n {#\n -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked.\n -- Depending on whether this database supports multiple grantees per statement, pass in the list of\n -- all grantees per privilege, or (if not) template one statement per privilege-grantee pair.\n -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql`\n #}\n {%- set dcl_statements = [] -%}\n {%- for privilege, grantees in grant_config.items() %}\n {%- if support_multiple_grantees_per_dcl_statement() and grantees -%}\n {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%}\n {%- do dcl_statements.append(dcl) -%}\n {%- else -%}\n {%- for grantee in grantees -%}\n {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %}\n {%- do dcl_statements.append(dcl) -%}\n {% endfor -%}\n {%- endif -%}\n {%- endfor -%}\n {{ return(dcl_statements) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.418817, "supported_languages": null}, "macro.dbt.call_dcl_statements": {"name": "call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.call_dcl_statements", "macro_sql": "{% macro call_dcl_statements(dcl_statement_list) %}\n {{ return(adapter.dispatch(\"call_dcl_statements\", \"dbt\")(dcl_statement_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.418998, "supported_languages": null}, "macro.dbt.default__call_dcl_statements": {"name": "default__call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__call_dcl_statements", "macro_sql": "{% macro default__call_dcl_statements(dcl_statement_list) %}\n {#\n -- By default, supply all grant + revoke statements in a single semicolon-separated block,\n -- so that they're all processed 
together.\n\n -- Some databases do not support this. Those adapters will need to override this macro\n -- to run each statement individually.\n #}\n {% call statement('grants') %}\n {% for dcl_statement in dcl_statement_list %}\n {{ dcl_statement }};\n {% endfor %}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4192362, "supported_languages": null}, "macro.dbt.apply_grants": {"name": "apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.apply_grants", "macro_sql": "{% macro apply_grants(relation, grant_config, should_revoke) %}\n {{ return(adapter.dispatch(\"apply_grants\", \"dbt\")(relation, grant_config, should_revoke)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4194639, "supported_languages": null}, "macro.dbt.default__apply_grants": {"name": "default__apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__apply_grants", "macro_sql": "{% macro default__apply_grants(relation, grant_config, should_revoke=True) %}\n {#-- If grant_config is {} or None, this is a no-op --#}\n {% if grant_config %}\n {% if should_revoke %}\n {#-- We think previous grants may have carried over --#}\n {#-- Show current grants and calculate diffs --#}\n {% set current_grants_table = run_query(get_show_grant_sql(relation)) %}\n {% set current_grants_dict = adapter.standardize_grants_dict(current_grants_table) %}\n {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %}\n {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %}\n {% if not (needs_granting or needs_revoking) %}\n {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}}\n {% endif %}\n {% else %}\n {#-- We don't think there's any chance of previous grants having carried over. --#}\n {#-- Jump straight to granting what the user has configured. 
--#}\n {% set needs_revoking = {} %}\n {% set needs_granting = grant_config %}\n {% endif %}\n {% if needs_granting or needs_revoking %}\n {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %}\n {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %}\n {% set dcl_statement_list = revoke_statement_list + grant_statement_list %}\n {% if dcl_statement_list %}\n {{ call_dcl_statements(dcl_statement_list) }}\n {% endif %}\n {% endif %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_grant_sql", "macro.dbt.get_dcl_statement_list", "macro.dbt.call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.420606, "supported_languages": null}, "macro.dbt.get_show_sql": {"name": "get_show_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_show_sql", "macro_sql": "{% macro get_show_sql(compiled_code, sql_header, limit) -%}\n {%- if sql_header -%}\n {{ sql_header }}\n {%- endif -%}\n {%- if limit is not none -%}\n {{ get_limit_subquery_sql(compiled_code, limit) }}\n {%- else -%}\n {{ compiled_code }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4211462, "supported_languages": null}, "macro.dbt.get_limit_subquery_sql": {"name": "get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_limit_subquery_sql", "macro_sql": "{% macro get_limit_subquery_sql(sql, limit) %}\n {{ adapter.dispatch('get_limit_subquery_sql', 'dbt')(sql, limit) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.421351, "supported_languages": null}, "macro.dbt.default__get_limit_subquery_sql": {"name": "default__get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.default__get_limit_subquery_sql", "macro_sql": "{% macro default__get_limit_subquery_sql(sql, limit) %}\n select *\n from (\n {{ sql }}\n ) as model_limit_subq\n limit {{ limit }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.421504, "supported_languages": null}, "macro.dbt.alter_column_comment": {"name": "alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1706724325.422226, "supported_languages": null}, "macro.dbt.default__alter_column_comment": {"name": "default__alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.422389, "supported_languages": null}, "macro.dbt.alter_relation_comment": {"name": "alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4225862, "supported_languages": null}, "macro.dbt.default__alter_relation_comment": {"name": "default__alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.422762, "supported_languages": null}, "macro.dbt.persist_docs": {"name": "persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.423021, "supported_languages": null}, "macro.dbt.default__persist_docs": {"name": "default__persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do 
run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4234922, "supported_languages": null}, "macro.dbt.get_catalog_relations": {"name": "get_catalog_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog_relations", "macro_sql": "{% macro get_catalog_relations(information_schema, relations) -%}\n {{ return(adapter.dispatch('get_catalog_relations', 'dbt')(information_schema, relations)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_catalog_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4262662, "supported_languages": null}, "macro.dbt.default__get_catalog_relations": {"name": "default__get_catalog_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog_relations", "macro_sql": "{% macro default__get_catalog_relations(information_schema, relations) -%}\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog_relations not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.426522, "supported_languages": null}, "macro.dbt.get_catalog": {"name": "get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.426711, "supported_languages": null}, "macro.dbt.default__get_catalog": {"name": "default__get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4269829, "supported_languages": null}, "macro.dbt.information_schema_name": {"name": "information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.information_schema_name", "macro_sql": "{% macro 
information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.427163, "supported_languages": null}, "macro.dbt.default__information_schema_name": {"name": "default__information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.427315, "supported_languages": null}, "macro.dbt.list_schemas": {"name": "list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4274821, "supported_languages": null}, "macro.dbt.default__list_schemas": {"name": "default__list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.427791, "supported_languages": null}, "macro.dbt.check_schema_exists": {"name": "check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.42798, "supported_languages": null}, "macro.dbt.default__check_schema_exists": {"name": "default__check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n 
select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.428295, "supported_languages": null}, "macro.dbt.list_relations_without_caching": {"name": "list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4284742, "supported_languages": null}, "macro.dbt.default__list_relations_without_caching": {"name": "default__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.428642, "supported_languages": null}, "macro.dbt.get_relations": {"name": "get_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_relations", "macro_sql": "{% macro get_relations() %}\n {{ return(adapter.dispatch('get_relations', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4288142, "supported_languages": null}, "macro.dbt.default__get_relations": {"name": "default__get_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_relations", "macro_sql": "{% macro default__get_relations() %}\n {{ exceptions.raise_not_implemented(\n 'get_relations macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.428958, "supported_languages": null}, "macro.dbt.get_relation_last_modified": {"name": "get_relation_last_modified", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_relation_last_modified", "macro_sql": "{% macro 
get_relation_last_modified(information_schema, relations) %}\n {{ return(adapter.dispatch('get_relation_last_modified', 'dbt')(information_schema, relations)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_relation_last_modified"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4291542, "supported_languages": null}, "macro.dbt.default__get_relation_last_modified": {"name": "default__get_relation_last_modified", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_relation_last_modified", "macro_sql": "{% macro default__get_relation_last_modified(information_schema, relations) %}\n {{ exceptions.raise_not_implemented(\n 'get_relation_last_modified macro not implemented for adapter ' + adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.429329, "supported_languages": null}, "macro.dbt.get_columns_in_relation": {"name": "get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4315882, "supported_languages": null}, "macro.dbt.default__get_columns_in_relation": {"name": "default__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.431743, "supported_languages": null}, "macro.dbt.sql_convert_columns_in_relation": {"name": "sql_convert_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.432042, "supported_languages": null}, "macro.dbt.get_empty_subquery_sql": {"name": "get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_subquery_sql", "macro_sql": "{% macro 
get_empty_subquery_sql(select_sql, select_sql_header=none) -%}\n {{ return(adapter.dispatch('get_empty_subquery_sql', 'dbt')(select_sql, select_sql_header)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4322581, "supported_languages": null}, "macro.dbt.default__get_empty_subquery_sql": {"name": "default__get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_subquery_sql", "macro_sql": "{% macro default__get_empty_subquery_sql(select_sql, select_sql_header=none) %}\n {%- if select_sql_header is not none -%}\n {{ select_sql_header }}\n {%- endif -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.432459, "supported_languages": null}, "macro.dbt.get_empty_schema_sql": {"name": "get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_schema_sql", "macro_sql": "{% macro get_empty_schema_sql(columns) -%}\n {{ return(adapter.dispatch('get_empty_schema_sql', 'dbt')(columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_schema_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.432628, "supported_languages": null}, "macro.dbt.default__get_empty_schema_sql": {"name": "default__get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_schema_sql", "macro_sql": "{% macro default__get_empty_schema_sql(columns) %}\n {%- set col_err = [] -%}\n {%- set col_naked_numeric = [] -%}\n select\n {% for i in columns %}\n {%- set col = columns[i] -%}\n {%- if col['data_type'] is not defined -%}\n {%- do col_err.append(col['name']) -%}\n {#-- If this column's type is just 'numeric' then it is missing precision/scale, raise a warning --#}\n {%- elif col['data_type'].strip().lower() in ('numeric', 'decimal', 'number') -%}\n {%- do col_naked_numeric.append(col['name']) -%}\n {%- endif -%}\n {% set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] %}\n cast(null as {{ col['data_type'] }}) as {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n {%- if (col_err | length) > 0 -%}\n {{ exceptions.column_type_missing(column_names=col_err) }}\n {%- elif (col_naked_numeric | length) > 0 -%}\n {{ exceptions.warn(\"Detected columns with numeric type and unspecified precision/scale, this can lead to unintended rounding: \" ~ col_naked_numeric ~ \"`\") }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4338, "supported_languages": null}, "macro.dbt.get_column_schema_from_query": {"name": "get_column_schema_from_query", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_column_schema_from_query", "macro_sql": "{% macro get_column_schema_from_query(select_sql, select_sql_header=none) -%}\n {% set columns = [] %}\n {# -- Using an 'empty subquery' here to get the same schema as the given select_sql statement, without necessitating a data scan.#}\n {% set sql = get_empty_subquery_sql(select_sql, select_sql_header) %}\n {% set column_schema = adapter.get_column_schema_from_query(sql) %}\n {{ return(column_schema) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.434129, "supported_languages": null}, "macro.dbt.get_columns_in_query": {"name": "get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.434307, "supported_languages": null}, "macro.dbt.default__get_columns_in_query": {"name": "default__get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n {{ get_empty_subquery_sql(select_sql) }}\n {% endcall %}\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4346728, "supported_languages": null}, "macro.dbt.alter_column_type": {"name": "alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.434895, "supported_languages": null}, "macro.dbt.default__alter_column_type": {"name": "default__alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. 
Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.43546, "supported_languages": null}, "macro.dbt.alter_relation_add_remove_columns": {"name": "alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_duckdb.duckdb__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.435704, "supported_languages": null}, "macro.dbt.default__alter_relation_add_remove_columns": {"name": "default__alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n\n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.436485, "supported_languages": null}, "macro.dbt.resolve_model_name": {"name": "resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.resolve_model_name", "macro_sql": "{% macro resolve_model_name(input_model_name) %}\n {{ return(adapter.dispatch('resolve_model_name', 'dbt')(input_model_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.438126, "supported_languages": null}, "macro.dbt.default__resolve_model_name": {"name": 
"default__resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.default__resolve_model_name", "macro_sql": "\n\n{%- macro default__resolve_model_name(input_model_name) -%}\n {{ input_model_name | string | replace('\"', '\\\"') }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.438272, "supported_languages": null}, "macro.dbt.build_ref_function": {"name": "build_ref_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_ref_function", "macro_sql": "{% macro build_ref_function(model) %}\n\n {%- set ref_dict = {} -%}\n {%- for _ref in model.refs -%}\n {% set _ref_args = [_ref.get('package'), _ref['name']] if _ref.get('package') else [_ref['name'],] %}\n {%- set resolved = ref(*_ref_args, v=_ref.get('version')) -%}\n {%- if _ref.get('version') -%}\n {% do _ref_args.extend([\"v\" ~ _ref['version']]) %}\n {%- endif -%}\n {%- do ref_dict.update({_ref_args | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef ref(*args, **kwargs):\n refs = {{ ref_dict | tojson }}\n key = '.'.join(args)\n version = kwargs.get(\"v\") or kwargs.get(\"version\")\n if version:\n key += f\".v{version}\"\n dbt_load_df_function = kwargs.get(\"dbt_load_df_function\")\n return dbt_load_df_function(refs[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.439056, "supported_languages": null}, "macro.dbt.build_source_function": {"name": "build_source_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_source_function", "macro_sql": "{% macro build_source_function(model) %}\n\n {%- set source_dict = {} -%}\n {%- for _source in model.sources -%}\n {%- set resolved = source(*_source) -%}\n {%- do source_dict.update({_source | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef source(*args, dbt_load_df_function):\n sources = {{ source_dict | tojson }}\n key = '.'.join(args)\n return dbt_load_df_function(sources[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.439469, "supported_languages": null}, "macro.dbt.build_config_dict": {"name": "build_config_dict", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_config_dict", "macro_sql": "{% macro build_config_dict(model) %}\n {%- set config_dict = {} -%}\n {% set config_dbt_used = zip(model.config.config_keys_used, model.config.config_keys_defaults) | list %}\n {%- for key, default in config_dbt_used -%}\n {# weird type testing with enum, would be much easier to write this logic in Python! 
#}\n {%- if key == \"language\" -%}\n {%- set value = \"python\" -%}\n {%- endif -%}\n {%- set value = model.config.get(key, default) -%}\n {%- do config_dict.update({key: value}) -%}\n {%- endfor -%}\nconfig_dict = {{ config_dict }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.440036, "supported_languages": null}, "macro.dbt.py_script_postfix": {"name": "py_script_postfix", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_postfix", "macro_sql": "{% macro py_script_postfix(model) %}\n# This part is user provided model code\n# you will need to copy the next section to run the code\n# COMMAND ----------\n# this part is dbt logic for get ref work, do not modify\n\n{{ build_ref_function(model ) }}\n{{ build_source_function(model ) }}\n{{ build_config_dict(model) }}\n\nclass config:\n def __init__(self, *args, **kwargs):\n pass\n\n @staticmethod\n def get(key, default=None):\n return config_dict.get(key, default)\n\nclass this:\n \"\"\"dbt.this() or dbt.this.identifier\"\"\"\n database = \"{{ this.database }}\"\n schema = \"{{ this.schema }}\"\n identifier = \"{{ this.identifier }}\"\n {% set this_relation_name = resolve_model_name(this) %}\n def __repr__(self):\n return '{{ this_relation_name }}'\n\n\nclass dbtObj:\n def __init__(self, load_df_function) -> None:\n self.source = lambda *args: source(*args, dbt_load_df_function=load_df_function)\n self.ref = lambda *args, **kwargs: ref(*args, **kwargs, dbt_load_df_function=load_df_function)\n self.config = config\n self.this = this()\n self.is_incremental = {{ is_incremental() }}\n\n# COMMAND ----------\n{{py_script_comment()}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.build_ref_function", "macro.dbt.build_source_function", "macro.dbt.build_config_dict", "macro.dbt.resolve_model_name", "macro.dbt.is_incremental", "macro.dbt.py_script_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.440507, "supported_languages": null}, "macro.dbt.py_script_comment": {"name": "py_script_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_comment", "macro_sql": "{%macro py_script_comment()%}\n{%endmacro%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.440581, "supported_languages": null}, "macro.dbt.test_unique": {"name": "test_unique", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4410932, "supported_languages": null}, "macro.dbt.test_not_null": {"name": "test_not_null", "resource_type": "macro", "package_name": "dbt", "path": 
"tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.441312, "supported_languages": null}, "macro.dbt.test_accepted_values": {"name": "test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.4416041, "supported_languages": null}, "macro.dbt.test_relationships": {"name": "test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706724325.441893, "supported_languages": null}}, "docs": {"doc.datafold_demo.__overview__": {"name": "__overview__", "resource_type": "doc", "package_name": "datafold_demo", "path": "overview.md", "original_file_path": "models/overview.md", "unique_id": "doc.datafold_demo.__overview__", "block_contents": "## Data Documentation for Jaffle Shop\n\n`jaffle_shop` is a fictional ecommerce store.\n\nThis [dbt](https://www.getdbt.com/) project is for testing out code.\n\nThe source code can be found [here](https://github.com/clrcrl/jaffle_shop)."}, "doc.datafold_demo.orders_status": {"name": "orders_status", "resource_type": "doc", "package_name": "datafold_demo", "path": "docs.md", "original_file_path": "models/docs.md", "unique_id": "doc.datafold_demo.orders_status", "block_contents": "Orders can be one of the following statuses:\n\n| status | description |\n|----------------|------------------------------------------------------------------------------------------------------------------------|\n| placed | The order has been placed but has not yet left the warehouse |\n| shipped | The order has ben shipped to the customer and is currently in transit |\n| completed | The order has been received by the customer |\n| return_pending | The customer has indicated that they would like to return the order, but it has not yet been received at the warehouse |\n| returned | The order has been returned by the customer and received at the warehouse |"}, "doc.dbt.__overview__": {"name": "__overview__", "resource_type": "doc", "package_name": "dbt", "path": "overview.md", "original_file_path": "docs/overview.md", "unique_id": "doc.dbt.__overview__", "block_contents": 
"### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {}, "metrics": {}, "groups": {}, "selectors": {}, "disabled": {}, "parent_map": {"model.datafold_demo.stg_customers": ["seed.datafold_demo.raw_customers"], "model.datafold_demo.stg_payments": ["seed.datafold_demo.raw_payments"], "model.datafold_demo.stg_orders": ["seed.datafold_demo.raw_orders"], "model.datafold_demo.customers": ["model.datafold_demo.stg_customers", "model.datafold_demo.stg_orders", "model.datafold_demo.stg_payments"], "model.datafold_demo.orders": ["model.datafold_demo.stg_orders", "model.datafold_demo.stg_payments"], "seed.datafold_demo.raw_customers": [], "seed.datafold_demo.raw_orders": [], "seed.datafold_demo.raw_payments": [], "test.datafold_demo.unique_stg_customers_customer_id.c7614daada": ["model.datafold_demo.stg_customers"], "test.datafold_demo.not_null_stg_customers_customer_id.e2cfb1f9aa": ["model.datafold_demo.stg_customers"], "test.datafold_demo.unique_stg_orders_order_id.e3b841c71a": ["model.datafold_demo.stg_orders"], "test.datafold_demo.not_null_stg_orders_order_id.81cfe2fe64": ["model.datafold_demo.stg_orders"], "test.datafold_demo.accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned.080fb20aad": ["model.datafold_demo.stg_orders"], "test.datafold_demo.unique_stg_payments_payment_id.3744510712": ["model.datafold_demo.stg_payments"], "test.datafold_demo.not_null_stg_payments_payment_id.c19cc50075": ["model.datafold_demo.stg_payments"], "test.datafold_demo.accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card.3c3820f278": ["model.datafold_demo.stg_payments"], "test.datafold_demo.unique_customers_customer_id.c5af1ff4b1": ["model.datafold_demo.customers"], 
"test.datafold_demo.not_null_customers_customer_id.5c9bf9911d": ["model.datafold_demo.customers"], "test.datafold_demo.unique_orders_order_id.fed79b3a6e": ["model.datafold_demo.orders"], "test.datafold_demo.not_null_orders_order_id.cf6c17daed": ["model.datafold_demo.orders"], "test.datafold_demo.not_null_orders_customer_id.c5f02694af": ["model.datafold_demo.orders"], "test.datafold_demo.accepted_values_orders_status__placed__shipped__completed__return_pending__returned.be6b5b5ec3": ["model.datafold_demo.orders"], "test.datafold_demo.not_null_orders_amount.106140f9fd": ["model.datafold_demo.orders"], "test.datafold_demo.not_null_orders_credit_card_amount.d3ca593b59": ["model.datafold_demo.orders"], "test.datafold_demo.not_null_orders_coupon_amount.ab90c90625": ["model.datafold_demo.orders"], "test.datafold_demo.not_null_orders_bank_transfer_amount.7743500c49": ["model.datafold_demo.orders"], "test.datafold_demo.not_null_orders_gift_card_amount.413a0d2d7a": ["model.datafold_demo.orders"]}, "child_map": {"model.datafold_demo.stg_customers": ["model.datafold_demo.customers", "test.datafold_demo.not_null_stg_customers_customer_id.e2cfb1f9aa", "test.datafold_demo.unique_stg_customers_customer_id.c7614daada"], "model.datafold_demo.stg_payments": ["model.datafold_demo.customers", "model.datafold_demo.orders", "test.datafold_demo.accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card.3c3820f278", "test.datafold_demo.not_null_stg_payments_payment_id.c19cc50075", "test.datafold_demo.unique_stg_payments_payment_id.3744510712"], "model.datafold_demo.stg_orders": ["model.datafold_demo.customers", "model.datafold_demo.orders", "test.datafold_demo.accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned.080fb20aad", "test.datafold_demo.not_null_stg_orders_order_id.81cfe2fe64", "test.datafold_demo.unique_stg_orders_order_id.e3b841c71a"], "model.datafold_demo.customers": ["test.datafold_demo.not_null_customers_customer_id.5c9bf9911d", "test.datafold_demo.unique_customers_customer_id.c5af1ff4b1"], "model.datafold_demo.orders": ["test.datafold_demo.accepted_values_orders_status__placed__shipped__completed__return_pending__returned.be6b5b5ec3", "test.datafold_demo.not_null_orders_amount.106140f9fd", "test.datafold_demo.not_null_orders_bank_transfer_amount.7743500c49", "test.datafold_demo.not_null_orders_coupon_amount.ab90c90625", "test.datafold_demo.not_null_orders_credit_card_amount.d3ca593b59", "test.datafold_demo.not_null_orders_customer_id.c5f02694af", "test.datafold_demo.not_null_orders_gift_card_amount.413a0d2d7a", "test.datafold_demo.not_null_orders_order_id.cf6c17daed", "test.datafold_demo.unique_orders_order_id.fed79b3a6e"], "seed.datafold_demo.raw_customers": ["model.datafold_demo.stg_customers"], "seed.datafold_demo.raw_orders": ["model.datafold_demo.stg_orders"], "seed.datafold_demo.raw_payments": ["model.datafold_demo.stg_payments"], "test.datafold_demo.unique_stg_customers_customer_id.c7614daada": [], "test.datafold_demo.not_null_stg_customers_customer_id.e2cfb1f9aa": [], "test.datafold_demo.unique_stg_orders_order_id.e3b841c71a": [], "test.datafold_demo.not_null_stg_orders_order_id.81cfe2fe64": [], "test.datafold_demo.accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned.080fb20aad": [], "test.datafold_demo.unique_stg_payments_payment_id.3744510712": [], "test.datafold_demo.not_null_stg_payments_payment_id.c19cc50075": [], 
"test.datafold_demo.accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card.3c3820f278": [], "test.datafold_demo.unique_customers_customer_id.c5af1ff4b1": [], "test.datafold_demo.not_null_customers_customer_id.5c9bf9911d": [], "test.datafold_demo.unique_orders_order_id.fed79b3a6e": [], "test.datafold_demo.not_null_orders_order_id.cf6c17daed": [], "test.datafold_demo.not_null_orders_customer_id.c5f02694af": [], "test.datafold_demo.accepted_values_orders_status__placed__shipped__completed__return_pending__returned.be6b5b5ec3": [], "test.datafold_demo.not_null_orders_amount.106140f9fd": [], "test.datafold_demo.not_null_orders_credit_card_amount.d3ca593b59": [], "test.datafold_demo.not_null_orders_coupon_amount.ab90c90625": [], "test.datafold_demo.not_null_orders_bank_transfer_amount.7743500c49": [], "test.datafold_demo.not_null_orders_gift_card_amount.413a0d2d7a": []}, "group_map": {}, "saved_queries": {}, "semantic_models": {}} \ No newline at end of file diff --git a/tests/dbt_artifacts/target/run_results.json b/tests/dbt_artifacts/target/run_results.json index 6886685e0..69fff250d 100644 --- a/tests/dbt_artifacts/target/run_results.json +++ b/tests/dbt_artifacts/target/run_results.json @@ -1 +1 @@ -{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/run-results/v4.json", "dbt_version": "1.4.5", "generated_at": "2023-03-28T17:53:00.424212Z", "invocation_id": "289d7789-15b8-44be-a1f6-828f34858212", "env": {}}, "results": [{"status": "success", "timing": [{"name": "compile", "started_at": "2023-03-28T17:53:00.283978Z", "completed_at": "2023-03-28T17:53:00.285641Z"}, {"name": "execute", "started_at": "2023-03-28T17:53:00.285856Z", "completed_at": "2023-03-28T17:53:00.341543Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.059603214263916016, "adapter_response": {"_message": "OK"}, "message": "OK", "failures": null, "unique_id": "model.jaffle_shop.stg_customers"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2023-03-28T17:53:00.345114Z", "completed_at": "2023-03-28T17:53:00.346354Z"}, {"name": "execute", "started_at": "2023-03-28T17:53:00.346549Z", "completed_at": "2023-03-28T17:53:00.357085Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.013532876968383789, "adapter_response": {"_message": "OK"}, "message": "OK", "failures": null, "unique_id": "model.jaffle_shop.stg_orders"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2023-03-28T17:53:00.359417Z", "completed_at": "2023-03-28T17:53:00.360809Z"}, {"name": "execute", "started_at": "2023-03-28T17:53:00.361018Z", "completed_at": "2023-03-28T17:53:00.371547Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.013769149780273438, "adapter_response": {"_message": "OK"}, "message": "OK", "failures": null, "unique_id": "model.jaffle_shop.stg_payments"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2023-03-28T17:53:00.374177Z", "completed_at": "2023-03-28T17:53:00.375799Z"}, {"name": "execute", "started_at": "2023-03-28T17:53:00.376001Z", "completed_at": "2023-03-28T17:53:00.399869Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.02851414680480957, "adapter_response": {"_message": "OK"}, "message": "OK", "failures": null, "unique_id": "model.jaffle_shop.customers"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2023-03-28T17:53:00.403579Z", "completed_at": "2023-03-28T17:53:00.405505Z"}, {"name": "execute", "started_at": "2023-03-28T17:53:00.405703Z", "completed_at": 
"2023-03-28T17:53:00.417432Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.01594710350036621, "adapter_response": {"_message": "OK"}, "message": "OK", "failures": null, "unique_id": "model.jaffle_shop.orders"}], "elapsed_time": 0.17573904991149902, "args": {"write_json": true, "use_colors": true, "printer_width": 80, "version_check": true, "partial_parse": true, "static_parser": true, "profiles_dir": "/Users/dan/.dbt", "send_anonymous_usage_stats": true, "quiet": false, "no_print": false, "cache_selected_only": false, "target": "dev", "which": "run", "rpc_method": "run", "indirect_selection": "eager"}} \ No newline at end of file +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/run-results/v5.json", "dbt_version": "1.7.3", "generated_at": "2024-01-31T18:05:26.126216Z", "invocation_id": "baf0db27-f48e-40a3-aa05-1b05b809ec5a", "env": {}}, "results": [{"status": "success", "timing": [{"name": "compile", "started_at": "2024-01-31T18:05:25.960740Z", "completed_at": "2024-01-31T18:05:25.969466Z"}, {"name": "execute", "started_at": "2024-01-31T18:05:25.970040Z", "completed_at": "2024-01-31T18:05:26.040906Z"}], "thread_id": "Thread-1", "execution_time": 0.08749794960021973, "adapter_response": {"_message": "OK"}, "message": "OK", "failures": null, "unique_id": "model.datafold_demo.stg_customers", "compiled": true, "compiled_code": "with source as (\n select * from \"jaffle_shop\".\"dev\".\"raw_customers\"\n\n),\n\nrenamed as (\n\n select\n id as customer_id,\n first_name,\n last_name\n\n from source\n\n)\n\nselect * from renamed", "relation_name": "\"jaffle_shop\".\"dev\".\"stg_customers\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-01-31T18:05:25.967740Z", "completed_at": "2024-01-31T18:05:25.982895Z"}, {"name": "execute", "started_at": "2024-01-31T18:05:25.993495Z", "completed_at": "2024-01-31T18:05:26.048436Z"}], "thread_id": "Thread-3", "execution_time": 0.09017610549926758, "adapter_response": {"_message": "OK"}, "message": "OK", "failures": null, "unique_id": "model.datafold_demo.stg_payments", "compiled": true, "compiled_code": "with source as (\n select * from \"jaffle_shop\".\"dev\".\"raw_payments\"\n\n),\n\nrenamed as (\n\n select\n id as payment_id,\n order_id,\n payment_method,\n\n -- `amount` is currently stored in cents, so we convert it to dollars\n amount / 100 as amount\n\n from source\n\n)\n\nselect * from renamed", "relation_name": "\"jaffle_shop\".\"dev\".\"stg_payments\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-01-31T18:05:25.965435Z", "completed_at": "2024-01-31T18:05:25.976208Z"}, {"name": "execute", "started_at": "2024-01-31T18:05:25.990582Z", "completed_at": "2024-01-31T18:05:26.049746Z"}], "thread_id": "Thread-2", "execution_time": 0.09407401084899902, "adapter_response": {"_message": "OK"}, "message": "OK", "failures": null, "unique_id": "model.datafold_demo.stg_orders", "compiled": true, "compiled_code": "with source as (\n select * from \"jaffle_shop\".\"dev\".\"raw_orders\"\n\n),\n\nrenamed as (\n\n select\n id as order_id,\n user_id as customer_id,\n order_date,\n status\n\n from source\n\n)\n\nselect * from renamed", "relation_name": "\"jaffle_shop\".\"dev\".\"stg_orders\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-01-31T18:05:26.057925Z", "completed_at": "2024-01-31T18:05:26.062891Z"}, {"name": "execute", "started_at": "2024-01-31T18:05:26.064062Z", "completed_at": "2024-01-31T18:05:26.110534Z"}], "thread_id": "Thread-5", 
"execution_time": 0.05589699745178223, "adapter_response": {"_message": "OK"}, "message": "OK", "failures": null, "unique_id": "model.datafold_demo.customers", "compiled": true, "compiled_code": "with customers as (\n\n select * from \"jaffle_shop\".\"dev\".\"stg_customers\"\n\n),\n\norders as (\n\n select * from \"jaffle_shop\".\"dev\".\"stg_orders\"\n\n),\n\npayments as (\n\n select * from \"jaffle_shop\".\"dev\".\"stg_payments\"\n\n),\n\ncustomer_orders as (\n\n select\n customer_id,\n\n min(order_date) as first_order,\n max(order_date) as most_recent_order,\n count(order_id) as number_of_orders\n from orders\n\n group by customer_id\n\n),\n\ncustomer_payments as (\n\n select\n orders.customer_id,\n sum(amount) as total_amount\n\n from payments\n\n left join orders on\n payments.order_id = orders.order_id\n\n group by orders.customer_id\n\n),\n\nfinal as (\n\n select\n customers.customer_id,\n customers.first_name,\n customers.last_name,\n customer_orders.first_order,\n customer_orders.most_recent_order,\n customer_orders.number_of_orders,\n customer_payments.total_amount as customer_lifetime_value\n\n from customers\n\n left join customer_orders\n on customers.customer_id = customer_orders.customer_id\n\n left join customer_payments\n on customers.customer_id = customer_payments.customer_id\n\n)\n\nselect * from final", "relation_name": "\"jaffle_shop\".\"dev\".\"customers\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-01-31T18:05:26.059981Z", "completed_at": "2024-01-31T18:05:26.063227Z"}, {"name": "execute", "started_at": "2024-01-31T18:05:26.075887Z", "completed_at": "2024-01-31T18:05:26.111336Z"}], "thread_id": "Thread-6", "execution_time": 0.0593719482421875, "adapter_response": {"_message": "OK"}, "message": "OK", "failures": null, "unique_id": "model.datafold_demo.orders", "compiled": true, "compiled_code": "\n\nwith orders as (\n\n select * from \"jaffle_shop\".\"dev\".\"stg_orders\"\n\n),\n\npayments as (\n\n select * from \"jaffle_shop\".\"dev\".\"stg_payments\"\n\n),\n\norder_payments as (\n\n select\n order_id,\n\n sum(case when payment_method = 'credit_card' then amount else 0 end) as credit_card_amount,\n sum(case when payment_method = 'coupon' then amount else 0 end) as coupon_amount,\n sum(case when payment_method = 'bank_transfer' then amount else 0 end) as bank_transfer_amount,\n sum(case when payment_method = 'gift_card' then amount else 0 end) as gift_card_amount,\n sum(amount) as total_amount\n\n from payments\n\n group by order_id\n\n),\n\nfinal as (\n\n select\n orders.order_id+1 as order_id,\n orders.customer_id,\n cast(orders.order_date as varchar) as order_date,\n \n 1 as new_column,\n\n order_payments.credit_card_amount,\n\n order_payments.coupon_amount,\n\n order_payments.bank_transfer_amount,\n\n order_payments.gift_card_amount,\n\n order_payments.total_amount as amount\n\n from orders\n\n\n left join order_payments\n on orders.order_id = order_payments.order_id\n\n),\n\nfinal2 as (\n\n select\n orders.order_id+2 as order_id,\n orders.customer_id,\n cast(orders.order_date as varchar) as order_date,\n \n 1 as new_column,\n\n order_payments.credit_card_amount,\n\n order_payments.coupon_amount,\n\n order_payments.bank_transfer_amount,\n\n order_payments.gift_card_amount,\n\n order_payments.total_amount as amount\n\n from orders\n\n\n left join order_payments\n on orders.order_id = order_payments.order_id\n\n)\n\n(select * from final limit 10)\nunion all\n(select * from final2 where order_id not in (select order_id from final) 
limit 10)", "relation_name": "\"jaffle_shop\".\"dev\".\"orders\""}], "elapsed_time": 0.23143315315246582, "args": {"log_path": "/Users/sung/Desktop/data-diff/data_diff_demo/logs", "strict_mode": false, "macro_debugging": false, "log_file_max_bytes": 10485760, "warn_error_options": {"include": [], "exclude": []}, "send_anonymous_usage_stats": true, "partial_parse": true, "defer": false, "log_level": "info", "enable_legacy_logger": false, "use_colors_file": true, "cache_selected_only": false, "version_check": true, "vars": {}, "log_level_file": "debug", "exclude": [], "populate_cache": true, "quiet": false, "profiles_dir": "/Users/sung/Desktop/data-diff/data_diff_demo", "which": "run", "favor_state": false, "partial_parse_file_diff": true, "invocation_command": "dbt run", "log_format_file": "debug", "use_colors": true, "indirect_selection": "eager", "project_dir": "/Users/sung/Desktop/data-diff/data_diff_demo", "introspect": true, "select": [], "write_json": true, "printer_width": 80, "show_resource_report": false, "print": true, "log_format": "default", "static_parser": true}} \ No newline at end of file diff --git a/tests/test_database.py b/tests/test_database.py index 0280eae1c..f5055ce78 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -5,8 +5,9 @@ import attrs import pytz -from data_diff import connect +from data_diff import connect, Database from data_diff import databases as dbs +from data_diff.abcs.database_types import TimestampTZ from data_diff.queries.api import table, current_timestamp from data_diff.queries.extras import NormalizeAsString from data_diff.schema import create_schema @@ -17,7 +18,6 @@ str_to_checksum, random_table_suffix, ) -from data_diff.abcs.database_types import TimestampTZ TEST_DATABASES = { dbs.MySQL, @@ -48,11 +48,11 @@ class TestMD5(unittest.TestCase): def test_md5_as_int(self): self.mysql = connect(TEST_MYSQL_CONN_STRING) - str = "hello world" - query_fragment = self.mysql.dialect.md5_as_int("'{0}'".format(str)) + message = "hello world" + query_fragment = self.mysql.dialect.md5_as_int(f"'{message}'") query = f"SELECT {query_fragment}" - self.assertEqual(str_to_checksum(str), self.mysql.query(query, int)) + self.assertEqual(str_to_checksum(message), self.mysql.query(query, int)) class TestConnect(unittest.TestCase): @@ -74,42 +74,48 @@ def test_correct_timezone(self): if self.db_cls in [dbs.MsSQL]: self.skipTest("No support for session tz.") name = "tbl_" + random_table_suffix() - db = get_conn(self.db_cls) - tbl = table(name, schema={"id": int, "created_at": TimestampTZ(9), "updated_at": TimestampTZ(9)}) - db.query(tbl.create()) + db_connection = get_conn(self.db_cls) + with db_connection: + tbl = table(name, schema={"id": int, "created_at": TimestampTZ(9), "updated_at": TimestampTZ(9)}) - tz = pytz.timezone("Europe/Berlin") + db_connection.query(tbl.create()) - now = datetime.now(tz) - if isinstance(db, dbs.Presto): - ms = now.microsecond // 1000 * 1000 # Presto max precision is 3 - now = now.replace(microsecond=ms) + tz = pytz.timezone("Europe/Berlin") - db.query(table(name).insert_row(1, now, now)) - db.query(db.dialect.set_timezone_to_utc()) + now = datetime.now(tz) + if isinstance(db_connection, dbs.Presto): + ms = now.microsecond // 1000 * 1000 # Presto max precision is 3 + now = now.replace(microsecond=ms) - t = table(name) - raw_schema = db.query_table_schema(t.path) - schema = db._process_table_schema(t.path, raw_schema) - schema = create_schema(db.name, t, schema, case_sensitive=True) - t = attrs.evolve(t, schema=schema) - 
t.schema["created_at"] = attrs.evolve(t.schema["created_at"], precision=t.schema["created_at"].precision) + db_connection.query(table(name).insert_row(1, now, now)) + db_connection.query(db_connection.dialect.set_timezone_to_utc()) - tbl = table(name, schema=t.schema) + table_object = table(name) + raw_schema = db_connection.query_table_schema(table_object.path) + schema = db_connection._process_table_schema(table_object.path, raw_schema) + schema = create_schema(db_connection.name, table_object, schema, case_sensitive=True) + table_object = attrs.evolve(table_object, schema=schema) + table_object.schema["created_at"] = attrs.evolve( + table_object.schema["created_at"], precision=table_object.schema["created_at"].precision + ) - results = db.query(tbl.select(NormalizeAsString(tbl[c]) for c in ["created_at", "updated_at"]), List[Tuple]) + tbl = table(name, schema=table_object.schema) - created_at = results[0][1] - updated_at = results[0][1] + results = db_connection.query( + tbl.select(NormalizeAsString(tbl[c]) for c in ["created_at", "updated_at"]), List[Tuple] + ) - utc = now.astimezone(pytz.UTC) - expected = utc.__format__("%Y-%m-%d %H:%M:%S.%f") + created_at = results[0][1] + updated_at = results[0][1] - self.assertEqual(created_at, expected) - self.assertEqual(updated_at, expected) + utc = now.astimezone(pytz.UTC) + expected = utc.__format__("%Y-%m-%d %H:%M:%S.%f") - db.query(tbl.drop()) + self.assertEqual(created_at, expected) + self.assertEqual(updated_at, expected) + + db_connection.query(tbl.drop()) @test_each_database @@ -119,18 +125,77 @@ def test_three_part_support(self): self.skipTest("Limited support for 3 part ids") table_name = "tbl_" + random_table_suffix() - db = get_conn(self.db_cls) - db_res = db.query(f"SELECT {db.dialect.current_database()}") - schema_res = db.query(f"SELECT {db.dialect.current_schema()}") - db_name = db_res.rows[0][0] - schema_name = schema_res.rows[0][0] - - table_one_part = table((table_name,), schema={"id": int}) - table_two_part = table((schema_name, table_name), schema={"id": int}) - table_three_part = table((db_name, schema_name, table_name), schema={"id": int}) - - for part in (table_one_part, table_two_part, table_three_part): - db.query(part.create()) - d = db.query_table_schema(part.path) - assert len(d) == 1 - db.query(part.drop()) + db_connection = get_conn(self.db_cls) + with db_connection: + db_res = db_connection.query(f"SELECT {db_connection.dialect.current_database()}") + schema_res = db_connection.query(f"SELECT {db_connection.dialect.current_schema()}") + db_name = db_res.rows[0][0] + schema_name = schema_res.rows[0][0] + + table_one_part = table((table_name,), schema={"id": int}) + table_two_part = table((schema_name, table_name), schema={"id": int}) + table_three_part = table((db_name, schema_name, table_name), schema={"id": int}) + + for part in (table_one_part, table_two_part, table_three_part): + db_connection.query(part.create()) + schema = db_connection.query_table_schema(part.path) + assert len(schema) == 1 + db_connection.query(part.drop()) + + +@test_each_database +class TestNumericPrecisionParsing(unittest.TestCase): + def test_specified_precision(self): + name = "tbl_" + random_table_suffix() + db_connection = get_conn(self.db_cls) + with db_connection: + table_object = table(name, schema={"value": "DECIMAL(10, 2)"}) + db_connection.query(table_object.create()) + table_object = table(name) + raw_schema = db_connection.query_table_schema(table_object.path) + schema = db_connection._process_table_schema(table_object.path, 
raw_schema) + self.assertEqual(schema["value"].precision, 2) + + def test_specified_zero_precision(self): + name = "tbl_" + random_table_suffix() + db_connection = get_conn(self.db_cls) + with db_connection: + table_object = table(name, schema={"value": "DECIMAL(10)"}) + db_connection.query(table_object.create()) + table_object = table(name) + raw_schema = db_connection.query_table_schema(table_object.path) + schema = db_connection._process_table_schema(table_object.path, raw_schema) + self.assertEqual(schema["value"].precision, 0) + + def test_default_precision(self): + name = "tbl_" + random_table_suffix() + db_connection = get_conn(self.db_cls) + with db_connection: + table_object = table(name, schema={"value": "DECIMAL"}) + db_connection.query(table_object.create()) + table_object = table(name) + raw_schema = db_connection.query_table_schema(table_object.path) + schema = db_connection._process_table_schema(table_object.path, raw_schema) + self.assertEqual(schema["value"].precision, db_connection.dialect.DEFAULT_NUMERIC_PRECISION) + + +# Skip presto as it doesn't support a close method: +# https://github.com/prestodb/presto-python-client/blob/be2610e524fa8400c9f2baa41ba0159d44ac2b11/prestodb/dbapi.py#L130 +closeable_databases = TEST_DATABASES.copy() +closeable_databases.discard(dbs.Presto) + +test_closeable_databases: Callable = test_each_database_in_list(closeable_databases) + + +@test_closeable_databases +class TestCloseMethod(unittest.TestCase): + def test_close_connection(self): + database: Database = get_conn(self.db_cls) + + # Perform a query to verify the connection is established + with database: + database.query("SELECT 1") + + # Now the connection should be closed, and trying to execute a query should fail. + with self.assertRaises(Exception): # Catch any type of exception. + database.query("SELECT 1") diff --git a/tests/test_database_types.py b/tests/test_database_types.py index e97ca4844..4bbeb0eae 100644 --- a/tests/test_database_types.py +++ b/tests/test_database_types.py @@ -8,9 +8,11 @@ import logging from decimal import Decimal from itertools import islice, repeat, chain +from typing import Iterator from parameterized import parameterized +from data_diff.databases.base import Row from data_diff.utils import number_to_human from data_diff.queries.api import table, commit, this, Code from data_diff.queries.api import insert_rows_in_batches @@ -366,12 +368,12 @@ class PaginatedTable: # much memory. 
     RECORDS_PER_BATCH = 1000000

-    def __init__(self, table_path, conn):
+    def __init__(self, table_path, conn) -> None:
         super().__init__()
         self.table_path = table_path
         self.conn = conn

-    def __iter__(self):
+    def __iter__(self) -> Iterator[Row]:
         last_id = 0
         while True:
             query = (
@@ -398,46 +400,46 @@ class DateTimeFaker:
         datetime.fromisoformat("2022-06-01 15:10:05.009900"),
     ]

-    def __init__(self, max):
+    def __init__(self, max) -> None:
         super().__init__()
         self.max = max

-    def __iter__(self):
+    def __iter__(self) -> Iterator[datetime]:
         initial = datetime(2000, 1, 1, 0, 0, 0, 0)
         step = timedelta(seconds=3, microseconds=571)
         return islice(chain(self.MANUAL_FAKES, accumulate(repeat(step), initial=initial)), self.max)

-    def __len__(self):
+    def __len__(self) -> int:
         return self.max


 class IntFaker:
     MANUAL_FAKES = [127, -3, -9, 37, 15, 0]

-    def __init__(self, max):
+    def __init__(self, max) -> None:
         super().__init__()
         self.max = max

-    def __iter__(self):
+    def __iter__(self) -> Iterator[int]:
         initial = -128
         step = 1
         return islice(chain(self.MANUAL_FAKES, accumulate(repeat(step), initial=initial)), self.max)

-    def __len__(self):
+    def __len__(self) -> int:
         return self.max


 class BooleanFaker:
     MANUAL_FAKES = [False, True, True, False]

-    def __init__(self, max):
+    def __init__(self, max) -> None:
         super().__init__()
         self.max = max

-    def __iter__(self):
+    def __iter__(self) -> Iterator[bool]:
         return iter(self.MANUAL_FAKES[: self.max])

-    def __len__(self):
+    def __len__(self) -> int:
         return min(self.max, len(self.MANUAL_FAKES))
@@ -461,28 +463,28 @@ class FloatFaker:
         3.141592653589793,
     ]

-    def __init__(self, max):
+    def __init__(self, max) -> None:
         super().__init__()
         self.max = max

-    def __iter__(self):
+    def __iter__(self) -> Iterator[float]:
         initial = -10.0001
         step = 0.00571
         return islice(chain(self.MANUAL_FAKES, accumulate(repeat(step), initial=initial)), self.max)

-    def __len__(self):
+    def __len__(self) -> int:
         return self.max


 class UUID_Faker:
-    def __init__(self, max):
+    def __init__(self, max) -> None:
         super().__init__()
         self.max = max

-    def __len__(self):
+    def __len__(self) -> int:
         return self.max

-    def __iter__(self):
+    def __iter__(self) -> Iterator[uuid.UUID]:
         return (uuid.uuid1(i) for i in range(self.max))
@@ -491,14 +493,14 @@ class JsonFaker:
         '{"keyText": "text", "keyInt": 3, "keyFloat": 5.4445, "keyBoolean": true}',
     ]

-    def __init__(self, max):
+    def __init__(self, max) -> None:
         super().__init__()
         self.max = max

-    def __iter__(self):
+    def __iter__(self) -> Iterator[str]:
         return iter(self.MANUAL_FAKES[: self.max])

-    def __len__(self):
+    def __len__(self) -> int:
         return min(self.max, len(self.MANUAL_FAKES))
diff --git a/tests/test_dbt.py b/tests/test_dbt.py
index c56765539..764f8d179 100644
--- a/tests/test_dbt.py
+++ b/tests/test_dbt.py
@@ -22,6 +22,7 @@ from data_diff.dbt_parser import (
     TDatadiffConfig,
 )
+from data_diff.schema import RawColumnInfo

 from tests.test_cli import run_datadiff_cli
@@ -35,16 +36,80 @@ def test_integration_basic_dbt(self):
             "--dbt", "--dbt-project-dir", test_project_path, "--dbt-profiles-dir", test_profiles_path
         )

-        # assertions for the diff that exists in tests/dbt_artifacts/jaffle_shop.duckdb
-        if test_project_path == artifacts_path:
-            diff_string = b"".join(diff).decode("utf-8")
-            # 5 diffs were ran
-            assert diff_string.count("<>") == 5
-            # 4 with no diffs
-            assert diff_string.count("No row differences") == 4
-            # 1 with a diff
-            assert diff_string.count(" Rows Added Rows Removed") == 1
+        orders_expected_output = """
+            jaffle_shop.prod.orders <> jaffle_shop.dev.orders
+            Primary Keys: ['order_id']
+            Where Filter: 'amount >= 0'
+            Included Columns: ['order_id', 'customer_id', 'order_date', 'amount', 'credit_card_amount', 'coupon_amount',
+            'bank_transfer_amount', 'gift_card_amount']
+            Excluded Columns: ['new_column']
+            Columns removed [-1]: {'status'}
+            Columns added [+1]: {'new_column'}
+            Type changed [1]: {'order_date'}
+
+            rows PROD <> DEV
+            --------- ------ ------------ -----------------
+            Total 10 11 [+1]
+            Added +2
+            Removed -1
+            Different 9
+            Unchanged 0
+
+            columns # diff values
+            -------------------- ---------------
+            amount 8
+            bank_transfer_amount 3
+            coupon_amount 3
+            credit_card_amount 6
+            customer_id 9
+            gift_card_amount 2
+            """
+
+        stg_payments_expected_output = """
+            jaffle_shop.prod.stg_payments <> jaffle_shop.dev.stg_payments
+            Primary Keys: ['payment_id']
+            No row differences
+            """
+
+        stg_customers_expected_output = """
+            jaffle_shop.prod.stg_customers <> jaffle_shop.dev.stg_customers
+            Primary Keys: ['customer_id']
+            No row differences
+            """
+
+        stg_orders_expected_output = """
+            jaffle_shop.prod.stg_orders <> jaffle_shop.dev.stg_orders
+            Primary Keys: ['order_id']
+            No row differences
+            """
+
+        customers_expected_output = """
+            jaffle_shop.prod.customers <> jaffle_shop.dev.customers
+            Primary Keys: ['customer_id']
+            No row differences
+            """
+
+        expected_outputs = [
+            orders_expected_output,
+            stg_payments_expected_output,
+            stg_customers_expected_output,
+            stg_orders_expected_output,
+            customers_expected_output,
+        ]
+        if test_project_path == artifacts_path:
+            actual_output_stripped = b"".join(diff).decode("utf-8").strip().replace(" ", "")
+
+            for expected_output in expected_outputs:
+                expected_output_stripped = "".join(line.strip() for line in expected_output.split("\n")).replace(
+                    " ", ""
+                )
+                assert expected_output_stripped in actual_output_stripped
+
+    @unittest.skipIf(
+        not os.environ.get("MOTHERDUCK_TOKEN"),
+        "MOTHERDUCK_TOKEN doesn't exist or is empty if this is run from a forked branch pull request",
+    )
     def test_integration_motherduck_dbt(self):
         artifacts_path = os.getcwd() + "/tests/dbt_artifacts"
         test_project_path = os.environ.get("DATA_DIFF_DBT_PROJ") or artifacts_path
@@ -56,12 +121,19 @@ def test_integration_motherduck_dbt(self):
         # assertions for the diff that exists in tests/dbt_artifacts/jaffle_shop.duckdb
         if test_project_path == artifacts_path:
             diff_string = b"".join(diff).decode("utf-8")
-            # 5 diffs were ran
-            assert diff_string.count("<>") == 5
             # 4 with no diffs
             assert diff_string.count("No row differences") == 4
             # 1 with a diff
-            assert diff_string.count(" Rows Added Rows Removed") == 1
+            assert diff_string.count("PROD") == 1
+            assert diff_string.count("DEV") == 1
+            assert diff_string.count("Primary Keys") == 5
+            assert diff_string.count("Where Filter") == 1
+            assert diff_string.count("Type Changed") == 0
+            assert diff_string.count("Total") == 1
+            assert diff_string.count("Added") == 1
+            assert diff_string.count("Removed") == 1
+            assert diff_string.count("Different") == 1
+            assert diff_string.count("Unchanged") == 1

     def test_integration_cloud_dbt(self):
         project_dir = os.environ.get("DATA_DIFF_DBT_PROJ")
@@ -75,7 +147,10 @@ def test_integration_cloud_dbt(self):
     def test_local_diff(self, mock_diff_tables):
         connection = {}
         mock_table1 = Mock()
-        column_dictionary = {"col1": ("col1", "type"), "col2": ("col2", "type")}
+        column_dictionary = {
+            "col1": RawColumnInfo(column_name="col1", data_type="type"),
+            "col2": RawColumnInfo(column_name="col2", data_type="type"),
+        }
         mock_table1.get_schema.return_value = column_dictionary
         mock_table2 = Mock()
         mock_table2.get_schema.return_value = column_dictionary
@@ -120,8 +195,14 @@ def test_local_diff_types_differ(self, mock_diff_tables):
         connection = {}
         mock_table1 = Mock()
         mock_table2 = Mock()
-        table1_column_dictionary = {"col1": ("col1", "type"), "col2": ("col2", "type")}
-        table2_column_dictionary = {"col1": ("col1", "type"), "col2": ("col2", "differing_type")}
+        table1_column_dictionary = {
+            "col1": RawColumnInfo(column_name="col1", data_type="type"),
+            "col2": RawColumnInfo(column_name="col2", data_type="type"),
+        }
+        table2_column_dictionary = {
+            "col1": RawColumnInfo(column_name="col1", data_type="type"),
+            "col2": RawColumnInfo(column_name="col2", data_type="differing_type"),
+        }
         mock_table1.get_schema.return_value = table1_column_dictionary
         mock_table2.get_schema.return_value = table2_column_dictionary
         mock_diff = MagicMock()
@@ -161,7 +242,10 @@ def test_local_diff_types_differ(self, mock_diff_tables):
     @patch("data_diff.dbt.diff_tables")
     def test_local_diff_no_diffs(self, mock_diff_tables):
         connection = {}
-        column_dictionary = {"col1": ("col1", "type"), "col2": ("col2", "type")}
+        column_dictionary = {
+            "col1": RawColumnInfo(column_name="col1", data_type="type"),
+            "col2": RawColumnInfo(column_name="col2", data_type="type"),
+        }
         mock_table1 = Mock()
         mock_table1.get_schema.return_value = column_dictionary
         mock_table2 = Mock()
@@ -214,6 +298,8 @@ def test_cloud_diff(self, mock_api, mock_os_environ, mock_print):
         expected_primary_keys = ["primary_key_column"]
         threads = None
         where = "a_string"
+        include_columns = ["created_at", "num_users", "sub_created_at", "sub_plan"]
+        exclude_columns = ["new_column"]
         connection = {}
         mock_api.create_data_diff.return_value = {"id": 123}
         mock_os_environ.get.return_value = expected_api_key
@@ -225,8 +311,8 @@ def test_cloud_diff(self, mock_api, mock_os_environ, mock_print):
             connection=connection,
             threads=threads,
             where_filter=where,
-            include_columns=[],
-            exclude_columns=[],
+            include_columns=include_columns,
+            exclude_columns=exclude_columns,
         )

         _cloud_diff(diff_vars, expected_datasource_id, org_meta=org_meta, api=mock_api)
@@ -240,8 +326,8 @@ def test_cloud_diff(self, mock_api, mock_os_environ, mock_print):
         self.assertEqual(payload.table1, prod_qualified_list)
         self.assertEqual(payload.table2, dev_qualified_list)
         self.assertEqual(payload.pk_columns, expected_primary_keys)
-        self.assertEqual(payload.filter1, where)
-        self.assertEqual(payload.filter2, where)
+        self.assertEqual(payload.include_columns, include_columns)
+        self.assertEqual(payload.exclude_columns, exclude_columns)

     @patch("data_diff.dbt._initialize_api")
     @patch("data_diff.dbt._get_diff_vars")
diff --git a/tests/test_diff_tables.py b/tests/test_diff_tables.py
index 9a9750895..705bf55e8 100644
--- a/tests/test_diff_tables.py
+++ b/tests/test_diff_tables.py
@@ -818,11 +818,11 @@ def test_simple2(self):
         V1 = N + 1
         V2 = N * 1000 + 2

-        diffs = [(i, i + N) for i in range(N)]
+        diffs = [(i + 1, i + N) for i in range(N)]  # pk=[1..1000], no dupes
         self.connection.query(
             [
-                self.src_table.insert_rows(diffs + [(K, V1)]),
-                self.dst_table.insert_rows(diffs + [(0, V2)]),
+                self.src_table.insert_rows(diffs + [(K, V1)]),  # exclusive pk=1001
+                self.dst_table.insert_rows(diffs + [(0, V2)]),  # exclusive pk=0
                 commit,
             ]
         )
diff --git a/tests/test_duckdb.py b/tests/test_duckdb.py
new file mode 100644
index 000000000..10c3644e3
--- /dev/null
+++ b/tests/test_duckdb.py
@@ -0,0 +1,42 @@
+import unittest
+from data_diff.databases import duckdb as duckdb_differ
+import os
+import uuid
+
+test_duckdb_filepath = str(uuid.uuid4()) + ".duckdb"
+
+
+class TestDuckDBTableSchemaMethods(unittest.TestCase):
+    def setUp(self):
+        # Create a new duckdb file
+        self.duckdb_conn = duckdb_differ.DuckDB(filepath=test_duckdb_filepath)
+
+    def tearDown(self):
+        # Optional: delete file after tests
+        os.remove(test_duckdb_filepath)
+
+    def test_normalize_table_path(self):
+        self.assertEqual(self.duckdb_conn._normalize_table_path(("test_table",)), (None, "main", "test_table"))
+        self.assertEqual(
+            self.duckdb_conn._normalize_table_path(("test_schema", "test_table")), (None, "test_schema", "test_table")
+        )
+        self.assertEqual(
+            self.duckdb_conn._normalize_table_path(("test_database", "test_schema", "test_table")),
+            ("test_database", "test_schema", "test_table"),
+        )
+
+        with self.assertRaises(ValueError):
+            self.duckdb_conn._normalize_table_path(("test_database", "test_schema", "test_table", "extra"))
+
+    def test_select_table_schema(self):
+        db_path = ("test_table",)
+        expected_sql = "SELECT column_name, data_type, datetime_precision, numeric_precision, numeric_scale FROM information_schema.columns WHERE table_name = 'test_table' AND table_schema = 'main' and table_catalog = current_catalog()"
+        self.assertEqual(self.duckdb_conn.select_table_schema(db_path), expected_sql)
+
+        db_path = ("custom_schema", "test_table")
+        expected_sql = "SELECT column_name, data_type, datetime_precision, numeric_precision, numeric_scale FROM information_schema.columns WHERE table_name = 'test_table' AND table_schema = 'custom_schema' and table_catalog = current_catalog()"
+        self.assertEqual(self.duckdb_conn.select_table_schema(db_path), expected_sql)
+
+        db_path = ("custom_db", "custom_schema", "test_table")
+        expected_sql = "SELECT column_name, data_type, datetime_precision, numeric_precision, numeric_scale FROM custom_db.information_schema.columns WHERE table_name = 'test_table' AND table_schema = 'custom_schema' and table_catalog = 'custom_db'"
+        self.assertEqual(self.duckdb_conn.select_table_schema(db_path), expected_sql)
diff --git a/tests/test_main.py b/tests/test_main.py
new file mode 100644
index 000000000..cc1333cba
--- /dev/null
+++ b/tests/test_main.py
@@ -0,0 +1,249 @@
+import unittest
+
+from data_diff import Database, JoinDiffer, HashDiffer
+from data_diff import databases as db
+from data_diff.__main__ import _get_dbs, _set_age, _get_table_differ, _get_expanded_columns, _get_threads
+from data_diff.databases.mysql import MySQL
+from data_diff.diff_tables import TableDiffer
+from tests.common import CONN_STRINGS, get_conn, DiffTestCase
+
+
+class TestGetDBS(unittest.TestCase):
+    def test__get_dbs(self) -> None:
+        db1: Database
+        db2: Database
+        db1_str: str = CONN_STRINGS[db.PostgreSQL]
+        db2_str: str = CONN_STRINGS[db.PostgreSQL]
+
+        # no threads and 2 threads1
+        db1, db2 = _get_dbs(0, db1_str, 2, db2_str, 0, False)
+        with db1, db2:
+            assert db1 == db2
+            assert db1.thread_count == 2
+
+        # 3 threads and 0 threads1
+        db1, db2 = _get_dbs(3, db1_str, 0, db2_str, 0, False)
+        with db1, db2:
+            assert db1 == db2
+            assert db1.thread_count == 3
+
+        # not interactive
+        db1, db2 = _get_dbs(1, db1_str, 0, db2_str, 0, False)
+        with db1, db2:
+            assert db1 == db2
+            assert not db1._interactive
+
+        # interactive
+        db1, db2 = _get_dbs(1, db1_str, 0, db2_str, 0, True)
+        with db1, db2:
+            assert db1 == db2
+            assert db1._interactive
+
+        db2_str: str = CONN_STRINGS[db.MySQL]
+
+        # no threads and 1 threads1 and 2 thread2
+        db1, db2 = _get_dbs(0, db1_str, 1, db2_str, 2, False)
+        with db1, db2:
+            assert db1 != db2
+            assert db1.thread_count == 1
+            assert db2.thread_count == 2
+
+        # 3 threads and 0 threads1 and 0 thread2
+        db1, db2 = _get_dbs(3, db1_str, 0, db2_str, 0, False)
+        with db1, db2:
+            assert db1 != db2
+            assert db1.thread_count == 3
+            assert db2.thread_count == 3
+            assert db1.thread_count == db2.thread_count
+
+        # not interactive
+        db1, db2 = _get_dbs(1, db1_str, 0, db2_str, 0, False)
+        with db1, db2:
+            assert db1 != db2
+            assert not db1._interactive
+            assert not db2._interactive
+
+        # interactive
+        db1, db2 = _get_dbs(1, db1_str, 0, db2_str, 0, True)
+        with db1, db2:
+            assert db1 != db2
+            assert db1._interactive
+            assert db2._interactive
+
+    def test_database_connection_failure(self) -> None:
+        """Test when database connection fails."""
+        with self.assertRaises(Exception):  # Assuming that connect() raises Exception on connection failure
+            _get_dbs(1, "db1_str", 0, "db2_str", 0, False)
+
+    def test_invalid_inputs(self) -> None:
+        """Test invalid inputs."""
+        with self.assertRaises(Exception):  # Assuming that connect() raises Exception on failure
+            _get_dbs(0, "", 0, "", 0, False)  # Empty connection strings
+
+    def test_database_object(self) -> None:
+        """Test returned database objects are valid and not None."""
+        db1_str: str = CONN_STRINGS[db.PostgreSQL]
+        db2_str: str = CONN_STRINGS[db.PostgreSQL]
+        db1, db2 = _get_dbs(1, db1_str, 0, db2_str, 0, False)
+        self.assertIsNotNone(db1)
+        self.assertIsNotNone(db2)
+        self.assertIsInstance(db1, Database)
+        self.assertIsInstance(db2, Database)
+
+    def test_databases_are_different(self) -> None:
+        """Test separate connections for different databases."""
+        db1_str: str = CONN_STRINGS[db.PostgreSQL]
+        db2_str: str = CONN_STRINGS[db.MySQL]
+        db1, db2 = _get_dbs(0, db1_str, 1, db2_str, 2, False)
+        with db1, db2:
+            self.assertIsNot(db1, db2)  # Check that db1 and db2 are not the same object
+
+
+class TestSetAge(unittest.TestCase):
+    def setUp(self) -> None:
+        self.database: Database = get_conn(db.PostgreSQL)
+
+    def tearDown(self):
+        self.database.close()
+
+    def test__set_age(self):
+        options = {}
+        _set_age(options, None, None, self.database)
+        assert len(options) == 0
+
+        options = {}
+        _set_age(options, "1d", None, self.database)
+        assert len(options) == 1
+        assert options.get("max_update") is not None
+
+        options = {}
+        _set_age(options, None, "1d", self.database)
+        assert len(options) == 1
+        assert options.get("min_update") is not None
+
+        options = {}
+        _set_age(options, "1d", "1d", self.database)
+        assert len(options) == 2
+        assert options.get("max_update") is not None
+        assert options.get("min_update") is not None
+
+    def test__set_age_db_query_failure(self):
+        with self.assertRaises(Exception):
+            options = {}
+            _set_age(options, "1d", "1d", self.mock_database)
+
+
+class TestGetTableDiffer(unittest.TestCase):
+    def test__get_table_differ(self):
+        db1: Database
+        db2: Database
+        db1_str: str = CONN_STRINGS[db.PostgreSQL]
+        db2_str: str = CONN_STRINGS[db.PostgreSQL]
+
+        db1, db2 = _get_dbs(1, db1_str, 0, db2_str, 0, False)
+        with db1, db2:
+            assert db1 == db2
+            table_differ: TableDiffer = self._get_differ("auto", db1, db2)
+            assert isinstance(table_differ, JoinDiffer)
+
+            table_differ: TableDiffer = self._get_differ("joindiff", db1, db2)
+            assert isinstance(table_differ, JoinDiffer)
+
+            table_differ: TableDiffer = self._get_differ("hashdiff", db1, db2)
+            assert isinstance(table_differ, HashDiffer)
+
+        db2_str: str = CONN_STRINGS[db.MySQL]
+        db1, db2 = _get_dbs(1, db1_str, 0, db2_str, 0, False)
+        with 
db1, db2: + assert db1 != db2 + table_differ: TableDiffer = self._get_differ("auto", db1, db2) + assert isinstance(table_differ, HashDiffer) + + table_differ: TableDiffer = self._get_differ("joindiff", db1, db2) + assert isinstance(table_differ, JoinDiffer) + + table_differ: TableDiffer = self._get_differ("hashdiff", db1, db2) + assert isinstance(table_differ, HashDiffer) + + @staticmethod + def _get_differ(algorithm, db1, db2): + return _get_table_differ(algorithm, db1, db2, False, 1, False, False, False, 1, None, None, None) + + +class TestGetExpandedColumns(DiffTestCase): + db_cls = MySQL + + def setUp(self): + super().setUp() + + def test__get_expanded_columns(self): + columns = ["user_id", "movie_id", "rating"] + kwargs = { + "db1": self.connection, + "schema1": self.src_schema, + "table1": self.table_src_name, + "db2": self.connection, + "schema2": self.dst_schema, + "table2": self.table_dst_name, + } + expanded_columns = _get_expanded_columns(columns, False, set(columns), **kwargs) + + assert len(expanded_columns) == 3 + assert len(set(expanded_columns) & set(columns)) == 3 + + def test__get_expanded_columns_case_sensitive(self): + columns = ["UserID", "MovieID", "Rating"] + kwargs = { + "db1": self.connection, + "schema1": self.src_schema, + "table1": self.table_src_name, + "db2": self.connection, + "schema2": self.dst_schema, + "table2": self.table_dst_name, + } + expanded_columns = _get_expanded_columns(columns, True, set(columns), **kwargs) + + assert len(expanded_columns) == 3 + assert len(set(expanded_columns) & set(columns)) == 3 + + +class TestGetThreads(unittest.TestCase): + def test__get_threads(self): + threaded, threads = _get_threads(None, None, None) + assert threaded + assert threads == 1 + + threaded, threads = _get_threads(None, 2, 3) + assert threaded + assert threads == 1 + + threaded, threads = _get_threads("serial", None, None) + assert not threaded + assert threads == 1 + + with self.assertRaises(AssertionError): + _get_threads("serial", 1, 2) + + threaded, threads = _get_threads("4", None, None) + assert threaded + assert threads == 4 + + with self.assertRaises(ValueError) as value_error: + _get_threads("auto", None, None) + assert str(value_error.exception) == "invalid literal for int() with base 10: 'auto'" + + threaded, threads = _get_threads(5, None, None) + assert threaded + assert threads == 5 + + threaded, threads = _get_threads(6, 7, 8) + assert threaded + assert threads == 6 + + with self.assertRaises(ValueError) as value_error: + _get_threads(0, None, None) + assert str(value_error.exception) == "Error: threads must be >= 1" + + with self.assertRaises(ValueError) as value_error: + _get_threads(-1, None, None) + assert str(value_error.exception) == "Error: threads must be >= 1" diff --git a/tests/test_mesh.py b/tests/test_mesh.py new file mode 100644 index 000000000..c0ee7c554 --- /dev/null +++ b/tests/test_mesh.py @@ -0,0 +1,132 @@ +import uuid + +from data_diff.abcs.database_types import String_UUID +from data_diff.databases import MySQL +from data_diff.table_segment import create_mesh_from_points +from data_diff.utils import ArithUUID, safezip +from tests.common import DiffTestCase, table_segment + + +# We do not need real tables, just any reference to them for proper object creation. 
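+# Descriptive note (added for review): the tests below check that String_UUID
+# meta-parameters (lowercase/uppercase) set on the key column types are carried
+# through make_value(), through new_key_bounds(), and into the extra "outer"
+# segments produced by create_mesh_from_points().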
+class TestDiffMesh(DiffTestCase): + db_cls = MySQL + + def test_meta_parameters_passed_from_coltypes_to_values(self): + key_types1 = [String_UUID(lowercase=True, uppercase=False)] + key_types2 = [String_UUID(lowercase=False, uppercase=True)] + + # side B is wider than side A to ensure there are "outer" regions. + min_uuid1 = uuid.UUID("11111111-1111-1111-1111-111111111111") + max_uuid1 = uuid.UUID("EEEEEEEE-EEEE-EEEE-EEEE-EEEEEEEEEEEE") + min_uuid2 = uuid.UUID("00000000-0000-0000-0000-000000000000") + max_uuid2 = uuid.UUID("FFFFFFFF-FFFF-FFFF-FFFF-FFFFFFFFFFFF") + min_key1 = (key_types1[0].make_value(min_uuid1),) + max_key1 = (key_types1[0].make_value(max_uuid1),) + min_key2 = (key_types2[0].make_value(min_uuid2),) + max_key2 = (key_types2[0].make_value(max_uuid2),) + + # Verify that we pass the meta-parameters from col types to values: + assert isinstance(min_key1[0], ArithUUID) + assert isinstance(max_key1[0], ArithUUID) + assert isinstance(min_key2[0], ArithUUID) + assert isinstance(max_key2[0], ArithUUID) + assert min_key1[0].uuid == min_uuid1 + assert min_key1[0].lowercase == True + assert min_key1[0].uppercase == False + assert max_key1[0].uuid == max_uuid1 + assert max_key1[0].lowercase == True + assert max_key1[0].uppercase == False + assert min_key2[0].uuid == min_uuid2 + assert min_key2[0].lowercase == False + assert min_key2[0].uppercase == True + assert max_key2[0].uuid == max_uuid2 + assert max_key2[0].lowercase == False + assert max_key2[0].uppercase == True + + def test_meta_parameters_left_as_is_if_not_casted(self): + table1 = table_segment(self.connection, self.table_src_path, "id", "timestamp", case_sensitive=False) + key_types1 = [String_UUID(lowercase=True, uppercase=False)] + + min_uuid1 = uuid.UUID("11111111-1111-1111-1111-111111111111") + max_uuid1 = uuid.UUID("EEEEEEEE-EEEE-EEEE-EEEE-EEEEEEEEEEEE") + min_key1 = (key_types1[0].make_value(min_uuid1),) + max_key1 = (key_types1[0].make_value(max_uuid1),) + + btable1 = table1.new_key_bounds(min_key=min_key1, max_key=max_key1) + assert btable1.min_key[0] is min_key1[0] # by identity, not by equality + assert btable1.max_key[0] is max_key1[0] # by identity, not by equality + + def test_mesh_keys_meta_parameters_preserved(self): + table1 = table_segment(self.connection, self.table_src_path, "id", "timestamp", case_sensitive=False) + table2 = table_segment(self.connection, self.table_src_path, "id", "timestamp", case_sensitive=False) + key_types1 = [String_UUID(lowercase=True, uppercase=False)] + key_types2 = [String_UUID(lowercase=False, uppercase=True)] + + # side B is wider than side A to ensure there are "outer" regions. 
+ min_uuid1 = uuid.UUID("11111111-1111-1111-1111-111111111111") + max_uuid1 = uuid.UUID("EEEEEEEE-EEEE-EEEE-EEEE-EEEEEEEEEEEE") + min_uuid2 = uuid.UUID("00000000-0000-0000-0000-000000000000") + max_uuid2 = uuid.UUID("FFFFFFFF-FFFF-FFFF-FFFF-FFFFFFFFFFFF") + min_key1 = (key_types1[0].make_value(min_uuid1),) + max_key1 = (key_types1[0].make_value(max_uuid1),) + min_key2 = (key_types2[0].make_value(min_uuid2),) + max_key2 = (key_types2[0].make_value(max_uuid2),) + + # This is what TableDiffer._bisect_and_diff_tables() does, precisely (yes, using key1!): + btable1 = table1.new_key_bounds(min_key=min_key1, max_key=max_key1, key_types=key_types1) + btable2 = table2.new_key_bounds(min_key=min_key1, max_key=max_key1, key_types=key_types2) + + # Verify that both sides have proper (the side-specific) pk meta-parameters: + assert btable1.min_key[0].uuid == min_uuid1 + assert btable1.min_key[0].lowercase == True + assert btable1.min_key[0].uppercase == False + assert btable1.max_key[0].uuid == max_uuid1 + assert btable1.max_key[0].lowercase == True + assert btable1.max_key[0].uppercase == False + assert btable2.min_key[0].uuid == min_uuid1 + assert btable2.min_key[0].lowercase == False + assert btable2.min_key[0].uppercase == True + assert btable2.max_key[0].uuid == max_uuid1 + assert btable2.max_key[0].lowercase == False + assert btable2.max_key[0].uppercase == True + + # This is what TableDiffer._bisect_and_diff_tables() does, precisely: + points = [list(sorted(p)) for p in safezip(min_key1, min_key2, max_key1, max_key2)] + box_mesh = create_mesh_from_points(*points) + new_regions = [(p1, p2) for p1, p2 in box_mesh if p1 < p2 and not (p1 >= min_key1 and p2 <= max_key1)] + extra_tables = [ + ( + table1.new_key_bounds(min_key=p1, max_key=p2, key_types=key_types1), + table2.new_key_bounds(min_key=p1, max_key=p2, key_types=key_types2), + ) + for p1, p2 in new_regions + ] + + # Verify that extra ("outer") segments have the proper pk meta-parameters: + assert len(extra_tables) == 2 + + assert extra_tables[0][0].min_key[0].uuid == min_uuid2 + assert extra_tables[0][0].min_key[0].lowercase == True + assert extra_tables[0][0].min_key[0].uppercase == False + assert extra_tables[0][0].max_key[0].uuid == min_uuid1 + assert extra_tables[0][0].max_key[0].lowercase == True + assert extra_tables[0][0].max_key[0].uppercase == False + assert extra_tables[0][1].min_key[0].uuid == min_uuid2 + assert extra_tables[0][1].min_key[0].lowercase == False + assert extra_tables[0][1].min_key[0].uppercase == True + assert extra_tables[0][1].max_key[0].uuid == min_uuid1 + assert extra_tables[0][1].max_key[0].lowercase == False + assert extra_tables[0][1].max_key[0].uppercase == True + + assert extra_tables[1][0].min_key[0].uuid == max_uuid1 + assert extra_tables[1][0].min_key[0].lowercase == True + assert extra_tables[1][0].min_key[0].uppercase == False + assert extra_tables[1][0].max_key[0].uuid == max_uuid2 + assert extra_tables[1][0].max_key[0].lowercase == True + assert extra_tables[1][0].max_key[0].uppercase == False + assert extra_tables[1][1].min_key[0].uuid == max_uuid1 + assert extra_tables[1][1].min_key[0].lowercase == False + assert extra_tables[1][1].min_key[0].uppercase == True + assert extra_tables[1][1].max_key[0].uuid == max_uuid2 + assert extra_tables[1][1].max_key[0].lowercase == False + assert extra_tables[1][1].max_key[0].uppercase == True diff --git a/tests/test_postgresql.py b/tests/test_postgresql.py index b5e9fa10f..4d0402040 100644 --- a/tests/test_postgresql.py +++ b/tests/test_postgresql.py @@ -1,9 
+1,12 @@ import unittest +from copy import deepcopy +from urllib.parse import quote -from data_diff.queries.api import table, commit -from data_diff import TableSegment, HashDiffer +from data_diff import TableSegment, HashDiffer, Database +from data_diff import connect_to_table from data_diff import databases as db -from tests.common import get_conn, random_table_suffix +from data_diff.queries.api import table, commit +from tests.common import get_conn, random_table_suffix, connect class TestUUID(unittest.TestCase): @@ -113,3 +116,43 @@ def test_100_fields(self): id_ = diff[0][1][0] result = (id_,) + tuple("1" for x in range(100)) self.assertEqual(diff, [("-", result)]) + + +class TestSpecialCharacterPassword(unittest.TestCase): + username: str = "test" + password: str = "passw!!!@rd" + + def setUp(self) -> None: + self.connection: Database = get_conn(db.PostgreSQL) + self.table_name = f"table{random_table_suffix()}" + + # Setup user with special character '@' in password + self.connection.query(f"DROP USER IF EXISTS {self.username};", None) + self.connection.query(f"CREATE USER {self.username} WITH PASSWORD '{self.password}';", None) + + def tearDown(self): + self.connection.query(f"DROP USER IF EXISTS {self.username};", None) + self.connection.close() + + def test_special_char_password(self): + db_config = deepcopy(self.connection._args) + db_config.update( + { + "driver": "postgresql", + "dbname": db_config.pop("database"), + "user": self.username, + "password": quote(self.password), + } + ) + + # verify pythonic connection method + connect_to_table(db_config, self.table_name) + + # verify connection method with URL string unquoted after it's verified + db_url = ( + f"postgresql://{db_config['user']}:{db_config['password']}@{db_config['host']}:" + f"{db_config.get('port', 5432)}/{db_config['dbname']}" + ) + + with connect(db_url) as connection_verified: + assert connection_verified._args.get("password") == self.password diff --git a/tests/test_utils.py b/tests/test_utils.py index 8972bd05d..712f3467c 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,6 +1,18 @@ import unittest +import re + +from data_diff.utils import ( + remove_passwords_in_dict, + match_regexps, + match_like, + number_to_human, + diff_int_dynamic_color_template, + dbt_diff_string_template, + columns_removed_template, + columns_added_template, + columns_type_changed_template, +) -from data_diff.utils import remove_passwords_in_dict, match_regexps, match_like, number_to_human from data_diff.__main__ import _remove_passwords_in_dict @@ -123,3 +135,79 @@ def test_number_to_human(self): assert number_to_human(-1000) == "-1k" assert number_to_human(-1000000) == "-1m" assert number_to_human(-1000000000) == "-1b" + + +class TestDiffIntDynamicColorTemplate(unittest.TestCase): + def test_string_input(self): + self.assertEqual(diff_int_dynamic_color_template("test_string"), "test_string") + + def test_positive_diff_value(self): + self.assertEqual(diff_int_dynamic_color_template(10), "[green]+10[/]") + + def test_negative_diff_value(self): + self.assertEqual(diff_int_dynamic_color_template(-10), "[red]-10[/]") + + def test_zero_diff(self): + self.assertEqual(diff_int_dynamic_color_template(0), "0") + + +class TestDbtDiffStringTemplateNoMock(unittest.TestCase): + def test_dbt_diff_string_template(self): + self.maxDiff = None + + expected_output = """ +rows PROD <> DEV +--------- ------ ------------ ------------------ +Total 10 20 [[green]+10[/]] +Added [green]+5[/] +Removed [red]-2[/] +Different 3 +Unchanged 5 + 
+columns # diff values +--------- --------------- +info values + +deps # data assets +------ --------------- +dep assets""" + + output = dbt_diff_string_template( + total_rows_table1=10, + total_rows_table2=20, + total_rows_diff=10, + rows_added=5, + rows_removed=2, + rows_updated=3, + rows_unchanged=5, + extra_info_dict={"info": "values"}, + extra_info_str="extra info", + is_cloud=False, + deps_impacts={"dep": "assets"}, + ) + + self.assertEqual(output, expected_output) + + +class TestColumnsTemplateMethods(unittest.TestCase): + def extract_columns_set(self, output): + # Extract quoted words by regex + output_list = re.findall(r"'(\w*)'", output) + # Convert list to set + output_set = set(output_list) + return output_set + + def test_columns_removed_template(self): + output = columns_removed_template({"column1", "column2"}) + self.assertIn("[red]Columns removed [-2]:[/]", output) + self.assertEqual(self.extract_columns_set(output), {"column1", "column2"}) + + def test_columns_added_template(self): + output = columns_added_template({"column1", "column2"}) + self.assertIn("[green]Columns added [+2]:", output) + self.assertEqual(self.extract_columns_set(output), {"column1", "column2"}) + + def test_columns_type_changed_template(self): + output = columns_type_changed_template({"column1", "column2"}) + self.assertIn("Type changed [2]: [green]", output) + self.assertEqual(self.extract_columns_set(output), {"column1", "column2"})
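
Reviewer note, not part of the patch: the new `TestDiffIntDynamicColorTemplate` cases pin down a small formatting contract, and the sketch below restates it in plain Python. The body is inferred only from the expected values in the tests above, not taken from `data_diff.utils`, so the helper name and the pass-through behavior for non-integer input are assumptions for illustration.

```python
# Hypothetical sketch of the contract exercised by TestDiffIntDynamicColorTemplate;
# not the actual data_diff.utils implementation.
from typing import Any


def diff_int_color_sketch(value: Any) -> str:
    # Non-integer inputs (e.g. pre-formatted strings) pass through unchanged.
    if not isinstance(value, int):
        return str(value)
    if value > 0:
        return f"[green]+{value}[/]"  # positive diffs: green, explicit '+' sign
    if value < 0:
        return f"[red]{value}[/]"  # negative diffs: red, '-' sign already present
    return "0"  # zero is rendered without color markup


assert diff_int_color_sketch("test_string") == "test_string"
assert diff_int_color_sketch(10) == "[green]+10[/]"
assert diff_int_color_sketch(-10) == "[red]-10[/]"
assert diff_int_color_sketch(0) == "0"
```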