
Commit

Merge branch 'main' into updated-branding
edgarrmondragon authored Mar 27, 2023
2 parents 2c1228c + 3d922bd commit c88c909
Showing 10 changed files with 22 additions and 26 deletions.
docs/conf.py (2 changes: 0 additions & 2 deletions)
@@ -17,8 +17,6 @@
 from pathlib import Path

 sys.path.insert(0, str(Path("..").resolve()))
-# sys.path.insert(0, os.path.abspath("../singer_sdk"))
-# sys.path.insert(0, os.path.abspath("/Users/ajsteers/Source/sdk"))


 # -- Project information -----------------------------------------------------
poetry.lock (24 changes: 12 additions & 12 deletions)

Some generated files are not rendered by default.

pyproject.toml (1 change: 1 addition & 0 deletions)
@@ -242,6 +242,7 @@ select = [
     "TCH", # flake8-type-checking
     "ARG", # flake8-unused-arguments
     "PTH", # flake8-use-pathlib
+    "ERA", # eradicate
 ]
 src = ["samples", "singer_sdk", "tests"]
 target-version = "py37"
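
For context, "ERA" enables Ruff's eradicate rules, which flag commented-out code as ERA001. The rest of this commit is the fallout of enabling that rule: dead comments are deleted, and comments kept on purpose get a per-line suppression. A minimal sketch of both outcomes, using an invented file name and variables that are not part of this commit:

# example.py (hypothetical, for illustration only)

# retries = 3              <- flagged as ERA001 (commented-out code); the fix is to delete it
timeout = 30
# timeout = 60  # noqa: ERA001  <- kept deliberately, so the rule is silenced on this one line
print(timeout)
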
samples/sample_tap_gitlab/gitlab_rest_streams.py (2 changes: 0 additions & 2 deletions)
@@ -163,8 +163,6 @@ class EpicsStream(ProjectBasedStream):
         Property("downvotes", IntegerType),
     ).to_dict()

-    # schema_filepath = SCHEMAS_DIR / "epics.json"
-
     def get_child_context(
         self,
         record: dict,
samples/sample_tap_google_analytics/ga_tap_stream.py (11 changes: 8 additions & 3 deletions)
@@ -50,14 +50,19 @@ def prepare_request_payload(
         next_page_token: Any | None, # noqa: ARG002
     ) -> dict | None:
         """Prepare the data payload for the REST API request."""
-        # params = self.get_url_params(context, next_page_token)
         request_def = {
             "viewId": self.config["view_id"],
             "metrics": [{"expression": m} for m in self.metrics],
             "dimensions": [{"name": d} for d in self.dimensions],
             # "orderBys": [
-            #     {"fieldName": "ga:sessions", "sortOrder": "DESCENDING"},
-            #     {"fieldName": "ga:pageviews", "sortOrder": "DESCENDING"},
+            #     { # noqa: ERA001
+            #         "fieldName": "ga:sessions", # noqa: ERA001
+            #         "sortOrder": "DESCENDING", # noqa: ERA001
+            #     },
+            #     { # noqa: ERA001
+            #         "fieldName": "ga:pageviews", # noqa: ERA001
+            #         "sortOrder": "DESCENDING", # noqa: ERA001
+            #     },
             # ],
         }
         if self.config.get("start_date"):
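
The reshaped "orderBys" block above shows how these suppressions behave: a "# noqa: ERA001" marker silences the rule only on the physical line it sits on, so each flagged line of a multi-line commented-out block carries its own marker, while lines the tool does not flag here (such as the bare "# "orderBys": [" and "# }," lines) are left alone. A small standalone sketch of the same pattern, with an invented payload rather than the SDK's real one:

# ga_example.py (hypothetical, mirrors the shape of the change above)
request_def = {
    "viewId": "123456",
    # "orderBys": [
    #     { # noqa: ERA001
    #         "fieldName": "ga:sessions", # noqa: ERA001
    #     },
    # ],
}
print(request_def)
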
singer_sdk/connectors/sql.py (2 changes: 0 additions & 2 deletions)
@@ -831,10 +831,8 @@ def merge_sql_types(
         # Gathering Type to match variables
         # sent in _adapt_column_type
         current_type = sql_types[0]
-        # sql_type = sql_types[1]

         # Getting the length of each type
-        # current_type_len: int = getattr(sql_types[0], "length", 0)
         sql_type_len: int = getattr(sql_types[1], "length", 0)
         if sql_type_len is None:
             sql_type_len = 0
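
The line that survives in merge_sql_types highlights why the "is None" check that follows it is needed: getattr(obj, "length", 0) falls back to 0 only when the attribute is missing entirely, not when it exists with the value None (as it can on a SQL type with no declared length). A standalone sketch of that behaviour, using invented stand-in classes rather than real SQLAlchemy types:

class BoundedText:
    length = 50

class UnboundedText:
    length = None  # the attribute exists, but carries no length

class NoLengthAttr:
    pass

for sql_type in (BoundedText(), UnboundedText(), NoLengthAttr()):
    sql_type_len = getattr(sql_type, "length", 0)
    if sql_type_len is None:  # same normalization as the SDK code above
        sql_type_len = 0
    print(type(sql_type).__name__, sql_type_len)  # prints 50, then 0, then 0
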
singer_sdk/sinks/sql.py (2 changes: 1 addition & 1 deletion)
@@ -182,7 +182,7 @@ def _check_conformed_names_not_duplicated(
         Raises:
             ConformedNameClashException: if duplicates found.
         """
-        # group: {'_a': ['1_a'], 'abc': ['aBc', 'abC']}
+        # group: {'_a': ['1_a'], 'abc': ['aBc', 'abC']} # noqa: ERA001
         grouped = defaultdict(list)
         for k, v in conformed_property_names.items():
             grouped[v].append(k)
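
The comment that gained a suppression here documents the shape of the grouped mapping: the loop inverts conformed_property_names (original name -> conformed name) so that each conformed name collects every original name that collapsed onto it, and any group with more than one entry is a clash. A minimal sketch of that inversion, with invented property names:

from collections import defaultdict

# original property name -> name after conforming (illustrative values only)
conformed_property_names = {"1_a": "_a", "aBc": "abc", "abC": "abc"}

grouped = defaultdict(list)
for original, conformed in conformed_property_names.items():
    grouped[conformed].append(original)

# grouped is now {'_a': ['1_a'], 'abc': ['aBc', 'abC']}
clashes = {name: originals for name, originals in grouped.items() if len(originals) > 1}
print(clashes)  # {'abc': ['aBc', 'abC']}
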
singer_sdk/target_base.py (2 changes: 0 additions & 2 deletions)
@@ -302,7 +302,6 @@ def _process_record_message(self, message_dict: dict) -> None:

         stream_name = message_dict["stream"]
         for stream_map in self.mapper.stream_maps[stream_name]:
-            # new_schema = helpers._float_to_decimal(new_schema)
             raw_record = copy.copy(message_dict["record"])
             transformed_record = stream_map.transform(raw_record)
             if transformed_record is None:
@@ -376,7 +375,6 @@ def _process_schema_message(self, message_dict: dict) -> None:
                 key_properties,
             )
         for stream_map in self.mapper.stream_maps[stream_name]:
-            # new_schema = helpers._float_to_decimal(new_schema)
             _ = self.get_sink(
                 stream_map.stream_alias,
                 schema=stream_map.transformed_schema,
tests/core/test_catalog_selection.py (1 change: 0 additions & 1 deletion)
@@ -203,7 +203,6 @@ def test_schema_selection(
         mask,
         logging.getLogger(),
     )
-    # selected_schema["properties"]["required"] = []
     assert (
         selected_schema["properties"]
         == PropertiesList(
tests/core/test_mapper.py (1 change: 0 additions & 1 deletion)
@@ -116,7 +116,6 @@ def sample_stream():
 def transform_stream_maps():
     return {
         "repositories": {
-            # "__source__": "repositories",
             "repo_name": "_['name']",
             "email_domain": "owner_email.split('@')[1]",
             "email_hash": "md5(config['hash_seed'] + owner_email)",
