fix(specs): browse response required properties
algolia-bot and millotp committed Jul 11, 2024
1 parent 761bbf5 commit f956b88
Showing 7 changed files with 30 additions and 150 deletions.
algoliasearch/ingestion/models/schedule_date_utils_input.py → algoliasearch/ingestion/models/docker_streams_input.py
@@ -7,22 +7,17 @@
from __future__ import annotations

from json import loads
-from typing import Annotated, Any, Dict, Optional, Self
+from typing import Any, Dict, Self

-from pydantic import BaseModel, ConfigDict, Field
+from pydantic import BaseModel, ConfigDict

-from algoliasearch.ingestion.models.mapping_input import MappingInput


-class ScheduleDateUtilsInput(BaseModel):
+class DockerStreamsInput(BaseModel):
"""
-Input for scheduled tasks whose source is of type `bigquery` and for which extracted data spans a fixed number of days.
+DockerStreamsInput
"""

-timeframe: Annotated[int, Field(le=30, strict=True, ge=1)] = Field(
-description="Number of days in the past until the current day for which to extract Big Query data."
-)
-mapping: Optional[MappingInput] = None
+streams: Dict[str, Any]

model_config = ConfigDict(
use_enum_values=True, populate_by_name=True, validate_assignment=True
@@ -33,7 +28,7 @@ def to_json(self) -> str:

@classmethod
def from_json(cls, json_str: str) -> Self:
"""Create an instance of ScheduleDateUtilsInput from a JSON string"""
"""Create an instance of DockerStreamsInput from a JSON string"""
return cls.from_dict(loads(json_str))

def to_dict(self) -> Dict[str, Any]:
@@ -51,27 +46,16 @@ def to_dict(self) -> Dict[str, Any]:
exclude={},
exclude_none=True,
)
-if self.mapping:
-_dict["mapping"] = self.mapping.to_dict()
return _dict

@classmethod
def from_dict(cls, obj: Dict) -> Self:
"""Create an instance of ScheduleDateUtilsInput from a dict"""
"""Create an instance of DockerStreamsInput from a dict"""
if obj is None:
return None

if not isinstance(obj, dict):
return cls.model_validate(obj)

-_obj = cls.model_validate(
-{
-"timeframe": obj.get("timeframe"),
-"mapping": (
-MappingInput.from_dict(obj.get("mapping"))
-if obj.get("mapping") is not None
-else None
-),
-}
-)
+_obj = cls.model_validate({"streams": obj.get("streams")})
return _obj
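
A minimal round-trip sketch of the renamed model, using only the methods shown in this diff (the streams payload below is hypothetical; streams is typed as Dict[str, Any], so any JSON object is accepted):

from algoliasearch.ingestion.models.docker_streams_input import DockerStreamsInput

# Hypothetical payload: `streams` is the model's only declared (and required) field.
parsed = DockerStreamsInput.from_json('{"streams": {"name": "products"}}')
assert parsed.streams == {"name": "products"}
print(parsed.to_json())  # serializes back to a JSON string
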
83 changes: 0 additions & 83 deletions algoliasearch/ingestion/models/on_demand_date_utils_input.py

This file was deleted.

algoliasearch/ingestion/models/streaming_utils_input.py → algoliasearch/ingestion/models/streaming_input.py
@@ -14,7 +14,7 @@
from algoliasearch.ingestion.models.mapping_input import MappingInput


-class StreamingUtilsInput(BaseModel):
+class StreamingInput(BaseModel):
"""
Input for a `streaming` task whose source is of type `ga4BigqueryExport` and for which extracted data is continuously streamed.
"""
@@ -30,7 +30,7 @@ def to_json(self) -> str:

@classmethod
def from_json(cls, json_str: str) -> Self:
"""Create an instance of StreamingUtilsInput from a JSON string"""
"""Create an instance of StreamingInput from a JSON string"""
return cls.from_dict(loads(json_str))

def to_dict(self) -> Dict[str, Any]:
@@ -54,7 +54,7 @@ def to_dict(self) -> Dict[str, Any]:

@classmethod
def from_dict(cls, obj: Dict) -> Self:
"""Create an instance of StreamingUtilsInput from a dict"""
"""Create an instance of StreamingInput from a dict"""
if obj is None:
return None

44 changes: 10 additions & 34 deletions algoliasearch/ingestion/models/task_input.py
@@ -11,32 +11,21 @@

from pydantic import BaseModel, ValidationError, model_serializer

-from algoliasearch.ingestion.models.on_demand_date_utils_input import (
-OnDemandDateUtilsInput,
-)
-from algoliasearch.ingestion.models.schedule_date_utils_input import (
-ScheduleDateUtilsInput,
-)
+from algoliasearch.ingestion.models.docker_streams_input import DockerStreamsInput
from algoliasearch.ingestion.models.shopify_input import ShopifyInput
-from algoliasearch.ingestion.models.streaming_utils_input import StreamingUtilsInput
+from algoliasearch.ingestion.models.streaming_input import StreamingInput


class TaskInput(BaseModel):
"""
Configuration of the task, depending on its type.
"""

-oneof_schema_1_validator: Optional[OnDemandDateUtilsInput] = None
-oneof_schema_2_validator: Optional[ScheduleDateUtilsInput] = None
-oneof_schema_3_validator: Optional[StreamingUtilsInput] = None
-oneof_schema_4_validator: Optional[ShopifyInput] = None
+oneof_schema_1_validator: Optional[StreamingInput] = None
+oneof_schema_2_validator: Optional[DockerStreamsInput] = None
+oneof_schema_3_validator: Optional[ShopifyInput] = None
actual_instance: Optional[
-Union[
-OnDemandDateUtilsInput,
-ScheduleDateUtilsInput,
-ShopifyInput,
-StreamingUtilsInput,
-]
+Union[DockerStreamsInput, ShopifyInput, StreamingInput]
] = None

def __init__(self, *args, **kwargs) -> None:
@@ -56,14 +45,7 @@ def __init__(self, *args, **kwargs) -> None:
@model_serializer
def unwrap_actual_instance(
self,
-) -> Optional[
-Union[
-OnDemandDateUtilsInput,
-ScheduleDateUtilsInput,
-ShopifyInput,
-StreamingUtilsInput,
-]
-]:
+) -> Optional[Union[DockerStreamsInput, ShopifyInput, StreamingInput]]:
"""
Unwraps the `actual_instance` when calling the `to_json` method.
"""
@@ -80,19 +62,13 @@ def from_json(cls, json_str: str) -> Self:
error_messages = []

try:
-instance.actual_instance = OnDemandDateUtilsInput.from_json(json_str)
+instance.actual_instance = StreamingInput.from_json(json_str)

return instance
except (ValidationError, ValueError) as e:
error_messages.append(str(e))
try:
-instance.actual_instance = ScheduleDateUtilsInput.from_json(json_str)
-
-return instance
-except (ValidationError, ValueError) as e:
-error_messages.append(str(e))
-try:
-instance.actual_instance = StreamingUtilsInput.from_json(json_str)
+instance.actual_instance = DockerStreamsInput.from_json(json_str)

return instance
except (ValidationError, ValueError) as e:
@@ -105,7 +81,7 @@ def from_json(cls, json_str: str) -> Self:
error_messages.append(str(e))

raise ValueError(
"No match found when deserializing the JSON string into TaskInput with oneOf schemas: OnDemandDateUtilsInput, ScheduleDateUtilsInput, ShopifyInput, StreamingUtilsInput. Details: "
"No match found when deserializing the JSON string into TaskInput with oneOf schemas: DockerStreamsInput, ShopifyInput, StreamingInput. Details: "
+ ", ".join(error_messages)
)

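A short sketch of how the reordered oneOf deserialization behaves after this change. The payload is hypothetical; from_json tries StreamingInput, then DockerStreamsInput, then ShopifyInput, so which variant wins depends on each schema's required fields:

from algoliasearch.ingestion.models.task_input import TaskInput

# Hypothetical payload; `streams` is the only field DockerStreamsInput requires.
task = TaskInput.from_json('{"streams": {"name": "products"}}')
print(type(task.actual_instance).__name__)  # whichever oneOf schema validated first
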
5 changes: 3 additions & 2 deletions algoliasearch/ingestion/models/transformation.py
@@ -25,8 +25,9 @@ class Transformation(BaseModel):
name: StrictStr = Field(
description="The uniquely identified name of your transformation."
)
-description: StrictStr = Field(
-description="A descriptive name for your transformation of what it does."
+description: Optional[StrictStr] = Field(
+default=None,
+description="A descriptive name for your transformation of what it does.",
)
created_at: StrictStr = Field(
description="Date of creation in RFC 3339 format.", alias="createdAt"
7 changes: 4 additions & 3 deletions algoliasearch/ingestion/models/transformation_create.py
@@ -7,7 +7,7 @@
from __future__ import annotations

from json import loads
-from typing import Any, Dict, Self
+from typing import Any, Dict, Optional, Self

from pydantic import BaseModel, ConfigDict, Field, StrictStr

@@ -21,8 +21,9 @@ class TransformationCreate(BaseModel):
name: StrictStr = Field(
description="The uniquely identified name of your transformation."
)
-description: StrictStr = Field(
-description="A descriptive name for your transformation of what it does."
+description: Optional[StrictStr] = Field(
+default=None,
+description="A descriptive name for your transformation of what it does.",
)

model_config = ConfigDict(
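To illustrate the effect of making description optional on Transformation and TransformationCreate, here is a small sketch using pydantic v2 field metadata; it avoids constructing the model, since any other required fields are not shown in this diff:

from algoliasearch.ingestion.models.transformation_create import TransformationCreate

# After this change, `description` defaults to None and is no longer required;
# `name` is still required. Other fields of the real model are not shown above.
fields = TransformationCreate.model_fields
print(fields["description"].is_required())  # False
print(fields["name"].is_required())         # True
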
3 changes: 2 additions & 1 deletion algoliasearch/search/client.py
@@ -42,6 +42,7 @@
from algoliasearch.search.models.batch_response import BatchResponse
from algoliasearch.search.models.batch_write_params import BatchWriteParams
from algoliasearch.search.models.browse_params import BrowseParams
+from algoliasearch.search.models.browse_params_object import BrowseParamsObject
from algoliasearch.search.models.browse_response import BrowseResponse
from algoliasearch.search.models.created_at_response import CreatedAtResponse
from algoliasearch.search.models.delete_api_key_response import DeleteApiKeyResponse
@@ -313,7 +314,7 @@ async def browse_objects(
self,
index_name: str,
aggregator: Optional[Callable[[BrowseResponse], None]],
-browse_params: Optional[BrowseParams] = None,
+browse_params: Optional[BrowseParamsObject] = BrowseParamsObject(),
request_options: Optional[Union[dict, RequestOptions]] = None,
) -> BrowseResponse:
"""
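
With the new default, callers can omit browse_params and an empty BrowseParamsObject() is used. A hypothetical usage sketch; the app ID, API key, index name, and the assumption that BrowseResponse exposes a hits list are all illustrative, not taken from this diff:

from algoliasearch.search.client import SearchClient

async def dump_index() -> None:
    # Hypothetical credentials and index name.
    client = SearchClient("YOUR_APP_ID", "YOUR_API_KEY")
    hits = []
    # browse_params is omitted; the new default BrowseParamsObject() applies.
    await client.browse_objects(
        index_name="products",
        aggregator=lambda resp: hits.extend(resp.hits),  # assumes BrowseResponse.hits
    )
    print(f"browsed {len(hits)} records")
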
