Skip to content

Commit

Permalink
fix: Handle type-mapping with SQLConnector.jsonschema_to_sql
Browse files Browse the repository at this point in the history
  • Loading branch information
edgarrmondragon committed Dec 18, 2024
1 parent f8d7fcc commit fbc5c50
Show file tree
Hide file tree
Showing 2 changed files with 15 additions and 15 deletions.
2 changes: 0 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -40,8 +40,6 @@ target-version = "py39"

[tool.ruff.lint]
ignore = [
"ANN101", # missing-type-self
"ANN102", # missing-type-cls
"ANN201",
"TD",
"D",
Expand Down
28 changes: 15 additions & 13 deletions target_snowflake/connector.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import urllib.parse
from enum import Enum
from functools import cached_property
from operator import contains, eq
from operator import eq
from pathlib import Path
from typing import TYPE_CHECKING, Any, cast

Expand All @@ -13,7 +13,7 @@
from cryptography.hazmat.primitives import serialization
from singer_sdk import typing as th
from singer_sdk.connectors import SQLConnector
from singer_sdk.connectors.sql import FullyQualifiedName
from singer_sdk.connectors.sql import FullyQualifiedName, JSONSchemaToSQL
from singer_sdk.exceptions import ConfigValidationError
from snowflake.sqlalchemy import URL
from snowflake.sqlalchemy.base import SnowflakeIdentifierPreparer
Expand All @@ -27,6 +27,7 @@

from sqlalchemy.engine import Engine

# TODO: Remove this when JSON schema to SQL is stable
SNOWFLAKE_MAX_STRING_LENGTH = 16777216


Expand Down Expand Up @@ -89,6 +90,8 @@ class SnowflakeConnector(SQLConnector):
allow_merge_upsert: bool = False # Whether MERGE UPSERT is supported.
allow_temp_tables: bool = True # Whether temp tables are supported.

max_varchar_length = 16_777_216

def __init__(self, *args: Any, **kwargs: Any) -> None:
self.table_cache: dict = {}
self.schema_cache: dict = {}
Expand Down Expand Up @@ -317,6 +320,16 @@ def _conform_max_length(jsonschema_type): # noqa: ANN205, ANN001
jsonschema_type["maxLength"] = SNOWFLAKE_MAX_STRING_LENGTH
return jsonschema_type

@cached_property
def jsonschema_to_sql(self) -> JSONSchemaToSQL:
to_sql = super().jsonschema_to_sql
to_sql.register_type_handler("integer", NUMBER)
to_sql.register_type_handler("object", VARIANT)
to_sql.register_type_handler("array", VARIANT)
to_sql.register_type_handler("number", sct.DOUBLE)
to_sql.register_format_handler("date-time", TIMESTAMP_NTZ)
return to_sql

def to_sql_type(self, jsonschema_type: dict) -> sqlalchemy.types.TypeEngine:
"""Return a JSON Schema representation of the provided type.
Expand All @@ -336,23 +349,12 @@ def to_sql_type(self, jsonschema_type: dict) -> sqlalchemy.types.TypeEngine:
maxlength = jsonschema_type.get("maxLength", SNOWFLAKE_MAX_STRING_LENGTH)
# define type maps
string_submaps = [
TypeMap(eq, TIMESTAMP_NTZ(), "date-time"),
TypeMap(contains, sqlalchemy.types.TIME(), "time"),
TypeMap(eq, sqlalchemy.types.DATE(), "date"),
TypeMap(eq, sqlalchemy.types.VARCHAR(maxlength), None),
]
type_maps = [
TypeMap(th._jsonschema_type_check, NUMBER(), ("integer",)), # noqa: SLF001
TypeMap(th._jsonschema_type_check, VARIANT(), ("object",)), # noqa: SLF001
TypeMap(th._jsonschema_type_check, VARIANT(), ("array",)), # noqa: SLF001
TypeMap(th._jsonschema_type_check, sct.DOUBLE(), ("number",)), # noqa: SLF001
]
# apply type maps
if th._jsonschema_type_check(jsonschema_type, ("string",)): # noqa: SLF001
datelike_type = th.get_datelike_property_type(jsonschema_type)
target_type = evaluate_typemaps(string_submaps, datelike_type, target_type)
else:
target_type = evaluate_typemaps(type_maps, jsonschema_type, target_type)

return cast(sqlalchemy.types.TypeEngine, target_type)

Expand Down

0 comments on commit fbc5c50

Please sign in to comment.