From 70b07a65e4f2b551fe5c11ddfe144e40927b8dd3 Mon Sep 17 00:00:00 2001
From: Kemp Po
Date: Sun, 24 Aug 2025 10:48:27 +0800
Subject: [PATCH] feat: configurable upload limits

---
 dbt_project.yml                          | 3 +++
 macros/upload_results/upload_results.sql | 6 +-----
 2 files changed, 4 insertions(+), 5 deletions(-)

diff --git a/dbt_project.yml b/dbt_project.yml
index 9e7db313..2ac9368f 100644
--- a/dbt_project.yml
+++ b/dbt_project.yml
@@ -21,3 +21,6 @@ models:
     columns: '{{ target.name != "databricks" and target.type != "sqlserver" }}'
     relation: '{{ target.type != "sqlserver" }}'
     +as_columnstore: False
+  insert_batch_size:
+    models: "{{ 50 if target.type in ['bigquery', 'trino'] else 100 }}"
+    default: "{{ 300 if target.type in ['bigquery', 'trino'] else 5000 }}"
diff --git a/macros/upload_results/upload_results.sql b/macros/upload_results/upload_results.sql
index 114a667d..23ed1a3f 100644
--- a/macros/upload_results/upload_results.sql
+++ b/macros/upload_results/upload_results.sql
@@ -19,11 +19,7 @@
     {% set objects = dbt_artifacts.get_dataset_content(dataset) %}
 
     {# Upload in chunks to reduce the query size #}
-    {% if dataset == 'models' %}
-        {% set upload_limit = 50 if target.type == 'bigquery' else 100 %}
-    {% else %}
-        {% set upload_limit = 300 if target.type == 'bigquery' else 5000 %}
-    {% endif %}
+    {% set upload_limit = var('insert_batch_size')[dataset if dataset == 'models' else 'default'] | int %}
 
     {# Loop through each chunk in turn #}
     {% for i in range(0, objects | length, upload_limit) -%}