Rely entirely on Singer SDK JSON Schema to SQL type conversion
edgarrmondragon committed Dec 19, 2024
1 parent fbc5c50 commit 18c449a
Showing 1 changed file with 2 additions and 64 deletions.
66 changes: 2 additions & 64 deletions target_snowflake/connector.py
@@ -3,15 +3,13 @@
 import urllib.parse
 from enum import Enum
 from functools import cached_property
-from operator import eq
 from pathlib import Path
-from typing import TYPE_CHECKING, Any, cast
+from typing import TYPE_CHECKING, Any

 import snowflake.sqlalchemy.custom_types as sct
 import sqlalchemy
 from cryptography.hazmat.backends import default_backend
 from cryptography.hazmat.primitives import serialization
-from singer_sdk import typing as th
 from singer_sdk.connectors import SQLConnector
 from singer_sdk.connectors.sql import FullyQualifiedName, JSONSchemaToSQL
 from singer_sdk.exceptions import ConfigValidationError
@@ -27,31 +25,6 @@

     from sqlalchemy.engine import Engine

-# TODO: Remove this when JSON schema to SQL is stable
-SNOWFLAKE_MAX_STRING_LENGTH = 16777216
-
-
-class TypeMap:
-    def __init__(self, operator, map_value, match_value=None) -> None:  # noqa: ANN001
-        self.operator = operator
-        self.map_value = map_value
-        self.match_value = match_value
-
-    def match(self, compare_value):  # noqa: ANN001
-        try:
-            if self.match_value:
-                return self.operator(compare_value, self.match_value)
-            return self.operator(compare_value)
-        except TypeError:
-            return False
-
-
-def evaluate_typemaps(type_maps, compare_value, unmatched_value):  # noqa: ANN001
-    for type_map in type_maps:
-        if type_map.match(compare_value):
-            return type_map.map_value
-    return unmatched_value
-
-
 class SnowflakeFullyQualifiedName(FullyQualifiedName):
     def __init__(
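The deleted TypeMap/evaluate_typemaps pair implemented first-match dispatch over type rules: TypeMap.match applies the stored operator as operator(compare_value, match_value) when a truthy match_value is set, or operator(compare_value) otherwise, treating a TypeError as no match, and evaluate_typemaps returns the map_value of the first matching rule or falls back to unmatched_value. A minimal sketch of that dispatch (the rule values below are illustrative, not from the connector):

    from operator import eq

    import sqlalchemy

    # One rule: map the value "date" to a sized VARCHAR; anything else
    # falls through to the caller-supplied default type.
    rules = [TypeMap(eq, sqlalchemy.types.VARCHAR(100), "date")]
    default = sqlalchemy.types.VARCHAR(16777216)

    evaluate_typemaps(rules, "date", default)  # -> VARCHAR(100)
    evaluate_typemaps(rules, "time", default)  # -> default (no rule matched)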
@@ -312,16 +285,9 @@ def get_column_alter_ddl(
             },
         )

-    @staticmethod
-    def _conform_max_length(jsonschema_type):  # noqa: ANN205, ANN001
-        """Alter jsonschema representations to limit max length to Snowflake's VARCHAR length."""
-        max_length = jsonschema_type.get("maxLength")
-        if max_length and max_length > SNOWFLAKE_MAX_STRING_LENGTH:
-            jsonschema_type["maxLength"] = SNOWFLAKE_MAX_STRING_LENGTH
-        return jsonschema_type
-
     @cached_property
     def jsonschema_to_sql(self) -> JSONSchemaToSQL:
+        # https://docs.snowflake.com/en/sql-reference/intro-summary-data-types.html
         to_sql = super().jsonschema_to_sql
         to_sql.register_type_handler("integer", NUMBER)
         to_sql.register_type_handler("object", VARIANT)
@@ -330,34 +296,6 @@ def jsonschema_to_sql(self) -> JSONSchemaToSQL:
         to_sql.register_format_handler("date-time", TIMESTAMP_NTZ)
         return to_sql

-    def to_sql_type(self, jsonschema_type: dict) -> sqlalchemy.types.TypeEngine:
-        """Return a JSON Schema representation of the provided type.
-
-        Uses custom Snowflake types from [snowflake-sqlalchemy](https://github.com/snowflakedb/snowflake-sqlalchemy/blob/main/src/snowflake/sqlalchemy/custom_types.py)
-
-        Args:
-            jsonschema_type: The JSON Schema representation of the source type.
-
-        Returns:
-            The SQLAlchemy type representation of the data type.
-        """
-        # start with default implementation
-        jsonschema_type = SnowflakeConnector._conform_max_length(jsonschema_type)
-        target_type = super().to_sql_type(jsonschema_type)
-        # snowflake max and default varchar length
-        # https://docs.snowflake.com/en/sql-reference/intro-summary-data-types.html
-        maxlength = jsonschema_type.get("maxLength", SNOWFLAKE_MAX_STRING_LENGTH)
-        # define type maps
-        string_submaps = [
-            TypeMap(eq, sqlalchemy.types.VARCHAR(maxlength), None),
-        ]
-        # apply type maps
-        if th._jsonschema_type_check(jsonschema_type, ("string",)):  # noqa: SLF001
-            datelike_type = th.get_datelike_property_type(jsonschema_type)
-            target_type = evaluate_typemaps(string_submaps, datelike_type, target_type)
-
-        return cast(sqlalchemy.types.TypeEngine, target_type)
-
     def schema_exists(self, schema_name: str) -> bool:
         if schema_name in self.schema_cache:
             return True
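With the bespoke mapping removed, conversion flows entirely through the SDK's JSONSchemaToSQL registry configured in jsonschema_to_sql above: JSON Schema types and formats resolve via registered handlers, and string sizing, including capping maxLength at Snowflake's 16,777,216-character VARCHAR maximum, is delegated to the SDK instead of _conform_max_length. A rough sketch of the resulting behavior, assuming a singer-sdk release whose JSONSchemaToSQL accepts a max_varchar_length cap and exposes to_sql_type:

    from singer_sdk.connectors.sql import JSONSchemaToSQL
    from snowflake.sqlalchemy.custom_types import NUMBER, TIMESTAMP_NTZ, VARIANT

    # Mirror the handlers registered in jsonschema_to_sql above.
    to_sql = JSONSchemaToSQL(max_varchar_length=16_777_216)  # Snowflake VARCHAR cap
    to_sql.register_type_handler("integer", NUMBER)
    to_sql.register_type_handler("object", VARIANT)
    to_sql.register_format_handler("date-time", TIMESTAMP_NTZ)

    to_sql.to_sql_type({"type": "integer"})                        # NUMBER
    to_sql.to_sql_type({"type": "string", "maxLength": 50})        # VARCHAR(50)
    to_sql.to_sql_type({"type": "string", "format": "date-time"})  # TIMESTAMP_NTZ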
