feat(specs): add v2 endpoints for ingestion
algolia/api-clients-automation#3416

Co-authored-by: algolia-bot <accounts+algolia-api-client-bot@algolia.com>
Co-authored-by: Clément Vannicatte <vannicattec@gmail.com>
algolia-bot and shortcuts committed Jul 25, 2024
1 parent befa44c commit 3d4ca53
Showing 4 changed files with 268 additions and 0 deletions.
90 changes: 90 additions & 0 deletions algoliasearch/ingestion/client.py
@@ -34,6 +34,7 @@
from algoliasearch.ingestion.models.authentication_update_response import (
AuthenticationUpdateResponse,
)
from algoliasearch.ingestion.models.batch_write_params import BatchWriteParams
from algoliasearch.ingestion.models.delete_response import DeleteResponse
from algoliasearch.ingestion.models.destination import Destination
from algoliasearch.ingestion.models.destination_create import DestinationCreate
@@ -3220,6 +3221,95 @@ async def list_transformations(
await self.list_transformations_with_http_info(sort, order, request_options)
).deserialize(ListTransformationsResponse)

async def push_task_with_http_info(
self,
task_id: Annotated[
StrictStr, Field(description="Unique identifier of a task.")
],
batch_write_params: Annotated[
BatchWriteParams,
Field(
description="Request body of a Search API `batch` request that will be pushed in the Connectors pipeline."
),
],
request_options: Optional[Union[dict, RequestOptions]] = None,
) -> ApiResponse[str]:
"""
Push a `batch` request payload through the Pipeline. You can check the status of task pushes with the observability endpoints.
Required API Key ACLs:
- addObject
- deleteIndex
- editSettings
:param task_id: Unique identifier of a task. (required)
:type task_id: str
:param batch_write_params: Request body of a Search API `batch` request that will be pushed in the Connectors pipeline. (required)
:type batch_write_params: BatchWriteParams
:param request_options: The request options to send along with the query, they will be merged with the transporter base parameters (headers, query params, timeouts, etc.). (optional)
:return: Returns the raw algoliasearch 'APIResponse' object.
"""

if task_id is None:
raise ValueError(
"Parameter `task_id` is required when calling `push_task`."
)

if batch_write_params is None:
raise ValueError(
"Parameter `batch_write_params` is required when calling `push_task`."
)

_data = {}
if batch_write_params is not None:
_data = batch_write_params

return await self._transporter.request(
verb=Verb.POST,
path="/2/tasks/{taskID}/push".replace(
"{taskID}", quote(str(task_id), safe="")
),
request_options=self._request_options.merge(
data=dumps(bodySerializer(_data)),
user_request_options=request_options,
),
use_read_transporter=False,
)

async def push_task(
self,
task_id: Annotated[
StrictStr, Field(description="Unique identifier of a task.")
],
batch_write_params: Annotated[
BatchWriteParams,
Field(
description="Request body of a Search API `batch` request that will be pushed in the Connectors pipeline."
),
],
request_options: Optional[Union[dict, RequestOptions]] = None,
) -> RunResponse:
"""
Push a `batch` request payload through the Pipeline. You can check the status of task pushes with the observability endpoints.
Required API Key ACLs:
- addObject
- deleteIndex
- editSettings
:param task_id: Unique identifier of a task. (required)
:type task_id: str
:param batch_write_params: Request body of a Search API `batch` request that will be pushed in the Connectors pipeline. (required)
:type batch_write_params: BatchWriteParams
:param request_options: The request options to send along with the query, they will be merged with the transporter base parameters (headers, query params, timeouts, etc.). (optional)
:return: Returns the deserialized response in a 'RunResponse' result object.
"""
return (
await self.push_task_with_http_info(
task_id, batch_write_params, request_options
)
).deserialize(RunResponse)

async def run_task_with_http_info(
self,
task_id: Annotated[
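
The two methods above expose the new `/2/tasks/{taskID}/push` endpoint. A minimal usage sketch of the async `push_task` call, assuming an ingestion client instance (`client`) and an existing task ID are created elsewhere — neither is part of this diff:

from algoliasearch.ingestion.models.action import Action
from algoliasearch.ingestion.models.batch_request import BatchRequest
from algoliasearch.ingestion.models.batch_write_params import BatchWriteParams

async def push_records(client, task_id: str):
    # Build the Search API `batch` payload from the models added in this commit.
    params = BatchWriteParams(
        requests=[
            BatchRequest(action=Action.ADDOBJECT, body={"objectID": "1", "name": "example"}),
            BatchRequest(action=Action.DELETEOBJECT, body={"objectID": "2"}),
        ]
    )
    # Pushes the payload through the Connectors pipeline and returns a RunResponse.
    return await client.push_task(task_id, params)
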
33 changes: 33 additions & 0 deletions algoliasearch/ingestion/models/action.py
@@ -0,0 +1,33 @@
# coding: utf-8

"""
Code generated by OpenAPI Generator (https://openapi-generator.tech), manual changes will be lost - read more on https://github.com/algolia/api-clients-automation. DO NOT EDIT.
"""

from __future__ import annotations

from enum import Enum
from json import loads
from typing import Self


class Action(str, Enum):
"""
Type of indexing operation.
"""

"""
allowed enum values
"""
ADDOBJECT = "addObject"
UPDATEOBJECT = "updateObject"
PARTIALUPDATEOBJECT = "partialUpdateObject"
PARTIALUPDATEOBJECTNOCREATE = "partialUpdateObjectNoCreate"
DELETEOBJECT = "deleteObject"
DELETE = "delete"
CLEAR = "clear"

@classmethod
def from_json(cls, json_str: str) -> Self:
"""Create an instance of Action from a JSON string"""
return cls(loads(json_str))
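
For illustration only (not part of the commit), the enum can be built from a plain string or, via `from_json`, from a JSON-encoded string:

assert Action("addObject") is Action.ADDOBJECT
assert Action.from_json('"partialUpdateObject"') is Action.PARTIALUPDATEOBJECT
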
68 changes: 68 additions & 0 deletions algoliasearch/ingestion/models/batch_request.py
@@ -0,0 +1,68 @@
# coding: utf-8

"""
Code generated by OpenAPI Generator (https://openapi-generator.tech), manual changes will be lost - read more on https://github.com/algolia/api-clients-automation. DO NOT EDIT.
"""

from __future__ import annotations

from json import loads
from typing import Any, Dict, Self

from pydantic import BaseModel, ConfigDict, Field

from algoliasearch.ingestion.models.action import Action


class BatchRequest(BaseModel):
"""
BatchRequest
"""

action: Action
body: Dict[str, Any] = Field(
description="Operation arguments (varies with specified `action`)."
)

model_config = ConfigDict(
use_enum_values=True, populate_by_name=True, validate_assignment=True
)

def to_json(self) -> str:
return self.model_dump_json(by_alias=True, exclude_unset=True)

@classmethod
def from_json(cls, json_str: str) -> Self:
"""Create an instance of BatchRequest from a JSON string"""
return cls.from_dict(loads(json_str))

def to_dict(self) -> Dict[str, Any]:
"""Return the dictionary representation of the model using alias.
This has the following differences from calling pydantic's
`self.model_dump(by_alias=True)`:
* `None` is only added to the output dict for nullable fields that
were set at model initialization. Other fields with value `None`
are ignored.
"""
_dict = self.model_dump(
by_alias=True,
exclude={},
exclude_none=True,
)
return _dict

@classmethod
def from_dict(cls, obj: Dict) -> Self:
"""Create an instance of BatchRequest from a dict"""
if obj is None:
return None

if not isinstance(obj, dict):
return cls.model_validate(obj)

_obj = cls.model_validate(
{"action": obj.get("action"), "body": obj.get("body")}
)
return _obj
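
A quick round-trip sketch of the serialization helpers defined above (illustrative, not part of the diff):

req = BatchRequest(action=Action.UPDATEOBJECT, body={"objectID": "42", "name": "renamed"})
as_json = req.to_json()                 # roughly '{"action":"updateObject","body":{...}}'
same = BatchRequest.from_json(as_json)  # rebuilds an equivalent BatchRequest
print(same.to_dict())                   # {'action': 'updateObject', 'body': {...}}
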
77 changes: 77 additions & 0 deletions algoliasearch/ingestion/models/batch_write_params.py
@@ -0,0 +1,77 @@
# coding: utf-8

"""
Code generated by OpenAPI Generator (https://openapi-generator.tech), manual changes will be lost - read more on https://github.com/algolia/api-clients-automation. DO NOT EDIT.
"""

from __future__ import annotations

from json import loads
from typing import Any, Dict, List, Self

from pydantic import BaseModel, ConfigDict

from algoliasearch.ingestion.models.batch_request import BatchRequest


class BatchWriteParams(BaseModel):
"""
Batch parameters.
"""

requests: List[BatchRequest]

model_config = ConfigDict(
use_enum_values=True, populate_by_name=True, validate_assignment=True
)

def to_json(self) -> str:
return self.model_dump_json(by_alias=True, exclude_unset=True)

@classmethod
def from_json(cls, json_str: str) -> Self:
"""Create an instance of BatchWriteParams from a JSON string"""
return cls.from_dict(loads(json_str))

def to_dict(self) -> Dict[str, Any]:
"""Return the dictionary representation of the model using alias.
This has the following differences from calling pydantic's
`self.model_dump(by_alias=True)`:
* `None` is only added to the output dict for nullable fields that
were set at model initialization. Other fields with value `None`
are ignored.
"""
_dict = self.model_dump(
by_alias=True,
exclude={},
exclude_none=True,
)
_items = []
if self.requests:
for _item in self.requests:
if _item:
_items.append(_item.to_dict())
_dict["requests"] = _items
return _dict

@classmethod
def from_dict(cls, obj: Dict) -> Self:
"""Create an instance of BatchWriteParams from a dict"""
if obj is None:
return None

if not isinstance(obj, dict):
return cls.model_validate(obj)

_obj = cls.model_validate(
{
"requests": (
[BatchRequest.from_dict(_item) for _item in obj.get("requests")]
if obj.get("requests") is not None
else None
)
}
)
return _obj
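
As a short sketch (assumed inputs, not from the commit), `from_dict` converts each nested request dict into a `BatchRequest` instance:

params = BatchWriteParams.from_dict(
    {"requests": [{"action": "clear", "body": {}}]}
)
assert isinstance(params.requests[0], BatchRequest)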
