Commit 58200d2

Merge branch 'fix/add-columns' of https://github.com/VanOord/dbt-sqlserver into fix/add-columns
2 parents: cabacdb + 2497527

5 files changed: +93 −5 lines

.github/workflows/release-version.yml (+3 −3)

@@ -2,9 +2,9 @@
 name: Release new version

 on: # yamllint disable-line rule:truthy
-  push:
-    tags:
-      - 'v*'
+  release:
+    types:
+      - published

 jobs:
   release-version:

dbt/adapters/sqlserver/__version__.py (+1 −1)

@@ -1 +1 @@
-version = "1.7.2"
+version = "1.7.3"

dbt/adapters/sqlserver/sql_server_adapter.py (+31)

@@ -1,5 +1,9 @@
 # https://github.com/microsoft/dbt-fabric/blob/main/dbt/adapters/fabric/fabric_adapter.py
+from typing import Optional
+
+import dbt.exceptions
 from dbt.adapters.fabric import FabricAdapter
+from dbt.contracts.graph.nodes import ConstraintType, ModelLevelConstraint

 from dbt.adapters.sqlserver.sql_server_column import SQLServerColumn
 from dbt.adapters.sqlserver.sql_server_configs import SQLServerConfigs

@@ -50,6 +54,33 @@ class SQLServerAdapter(FabricAdapter):
     # return columns
     # endregion

+    @classmethod
+    def render_model_constraint(cls, constraint: ModelLevelConstraint) -> Optional[str]:
+        constraint_prefix = "add constraint "
+        column_list = ", ".join(constraint.columns)
+
+        if constraint.name is None:
+            raise dbt.exceptions.DbtDatabaseError(
+                "Constraint name cannot be empty. Provide constraint name - column "
+                + column_list
+                + " and run the project again."
+            )
+
+        if constraint.type == ConstraintType.unique:
+            return constraint_prefix + f"{constraint.name} unique nonclustered({column_list})"
+        elif constraint.type == ConstraintType.primary_key:
+            return constraint_prefix + f"{constraint.name} primary key nonclustered({column_list})"
+        elif constraint.type == ConstraintType.foreign_key and constraint.expression:
+            return (
+                constraint_prefix
+                + f"{constraint.name} foreign key({column_list}) references "
+                + constraint.expression
+            )
+        elif constraint.type == ConstraintType.custom and constraint.expression:
+            return f"{constraint_prefix}{constraint.expression}"
+        else:
+            return None
+
     @classmethod
     def date_function(cls):
         return "getdate()"
(new seed macros file, +57)

@@ -0,0 +1,57 @@
+{% macro sqlserver__get_binding_char() %}
+    {{ return('?') }}
+{% endmacro %}
+
+{% macro sqlserver__get_batch_size() %}
+    {{ return(400) }}
+{% endmacro %}
+
+{% macro calc_batch_size(num_columns) %}
+    {#
+        SQL Server allows for a max of 2098 parameters in a single statement.
+        Check if the max_batch_size fits with the number of columns, otherwise
+        reduce the batch size so it fits.
+    #}
+    {% set max_batch_size = get_batch_size() %}
+    {% set calculated_batch = (2098 / num_columns)|int %}
+    {% set batch_size = [max_batch_size, calculated_batch] | min %}
+
+    {{ return(batch_size) }}
+{% endmacro %}
+
+{% macro sqlserver__load_csv_rows(model, agate_table) %}
+    {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}
+    {% set batch_size = calc_batch_size(agate_table.column_names|length) %}
+    {% set bindings = [] %}
+    {% set statements = [] %}
+
+    {{ log("Inserting batches of " ~ batch_size ~ " records") }}
+
+    {% for chunk in agate_table.rows | batch(batch_size) %}
+        {% set bindings = [] %}
+
+        {% for row in chunk %}
+            {% do bindings.extend(row) %}
+        {% endfor %}
+
+        {% set sql %}
+            insert into {{ this.render() }} ({{ cols_sql }}) values
+            {% for row in chunk -%}
+                ({%- for column in agate_table.column_names -%}
+                    {{ get_binding_char() }}
+                    {%- if not loop.last%},{%- endif %}
+                {%- endfor -%})
+                {%- if not loop.last%},{%- endif %}
+            {%- endfor %}
+        {% endset %}
+
+        {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}
+
+        {% if loop.index0 == 0 %}
+            {% do statements.append(sql) %}
+        {% endif %}
+    {% endfor %}
+
+    {# Return SQL so we can render it out into the compiled files #}
+    {{ return(statements[0]) }}
+{% endmacro %}
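
The batch sizing in calc_batch_size can be mirrored in plain Python. This is only an illustration of the macro's arithmetic, not code shipped in the commit:

# Plain-Python illustration of the calc_batch_size macro above (not commit code).
# SQL Server allows a maximum of 2098 bound parameters per statement, so the
# batch is the smaller of the 400-row default and 2098 divided by the column count.
def calc_batch_size(num_columns: int, max_batch_size: int = 400) -> int:
    calculated_batch = int(2098 / num_columns)  # mirrors (2098 / num_columns)|int
    return min(max_batch_size, calculated_batch)

print(calc_batch_size(4))  # 400 -- the default batch size is the limiting factor
print(calc_batch_size(6))  # 349 -- matches the updated assertion in test_seed.py below

Since every row binds one parameter per column, a six-column seed is inserted in batches of 349 rows (349 * 6 = 2094 parameters, just under the limit).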

tests/functional/adapter/test_seed.py (+1 −1)

@@ -243,7 +243,7 @@ def test_custom_batch_size(self, project, logs_dir):
         logs = "".join(fp.readlines())
         # this is changed from 350.
         # Fabric goes -1 of min batch of (2100/number of columns -1) or 400
-        assert "Inserting batches of 349.0 records" in logs
+        assert "Inserting batches of 349 records" in logs


 class SeedConfigBase:
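
The expected log line changes because the new sqlserver macro truncates the batch size to an integer, while the inherited Fabric calculation described in the comment above yields a float. A small sketch, assuming the six-column seed implied by these numbers:

# Illustration only (not commit code): why the logged batch size loses its ".0".
columns = 6
fabric_batch = min(400, 2100 / columns - 1)      # 349.0 (float)
sqlserver_batch = min(400, int(2098 / columns))  # 349 (int)
print(f"Inserting batches of {fabric_batch} records")     # ... 349.0 records
print(f"Inserting batches of {sqlserver_batch} records")  # ... 349 records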
