diff --git a/elasticsearch/_async/client/inference.py b/elasticsearch/_async/client/inference.py
index 2c9cfdda5..2b580ae84 100644
--- a/elasticsearch/_async/client/inference.py
+++ b/elasticsearch/_async/client/inference.py
@@ -351,67 +351,6 @@ async def inference(
path_parts=__path_parts,
)
- @_rewrite_parameters(
- body_name="chat_completion_request",
- )
- async def post_eis_chat_completion(
- self,
- *,
- eis_inference_id: str,
- chat_completion_request: t.Optional[t.Mapping[str, t.Any]] = None,
- body: t.Optional[t.Mapping[str, t.Any]] = None,
- error_trace: t.Optional[bool] = None,
- filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
- human: t.Optional[bool] = None,
- pretty: t.Optional[bool] = None,
- ) -> ObjectApiResponse[t.Any]:
- """
- .. raw:: html
-
-
-        Perform a chat completion task through the Elastic Inference Service (EIS).
-        Perform a chat completion inference task with the elastic service.
-
-
- ``_
-
- :param eis_inference_id: The unique identifier of the inference endpoint.
- :param chat_completion_request:
- """
- if eis_inference_id in SKIP_IN_PATH:
- raise ValueError("Empty value passed for parameter 'eis_inference_id'")
- if chat_completion_request is None and body is None:
- raise ValueError(
- "Empty value passed for parameters 'chat_completion_request' and 'body', one of them should be set."
- )
- elif chat_completion_request is not None and body is not None:
- raise ValueError("Cannot set both 'chat_completion_request' and 'body'")
- __path_parts: t.Dict[str, str] = {"eis_inference_id": _quote(eis_inference_id)}
- __path = (
- f'/_inference/chat_completion/{__path_parts["eis_inference_id"]}/_stream'
- )
- __query: t.Dict[str, t.Any] = {}
- if error_trace is not None:
- __query["error_trace"] = error_trace
- if filter_path is not None:
- __query["filter_path"] = filter_path
- if human is not None:
- __query["human"] = human
- if pretty is not None:
- __query["pretty"] = pretty
- __body = (
- chat_completion_request if chat_completion_request is not None else body
- )
- __headers = {"accept": "application/json", "content-type": "application/json"}
- return await self.perform_request( # type: ignore[return-value]
- "POST",
- __path,
- params=__query,
- headers=__headers,
- body=__body,
- endpoint_id="inference.post_eis_chat_completion",
- path_parts=__path_parts,
- )
-
@_rewrite_parameters(
body_name="inference_config",
)
@@ -1088,83 +1027,6 @@ async def put_cohere(
path_parts=__path_parts,
)
- @_rewrite_parameters(
- body_fields=("service", "service_settings"),
- )
- async def put_eis(
- self,
- *,
- task_type: t.Union[str, t.Literal["chat_completion"]],
- eis_inference_id: str,
- service: t.Optional[t.Union[str, t.Literal["elastic"]]] = None,
- service_settings: t.Optional[t.Mapping[str, t.Any]] = None,
- error_trace: t.Optional[bool] = None,
- filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
- human: t.Optional[bool] = None,
- pretty: t.Optional[bool] = None,
- body: t.Optional[t.Dict[str, t.Any]] = None,
- ) -> ObjectApiResponse[t.Any]:
- """
- .. raw:: html
-
- Create an Elastic Inference Service (EIS) inference endpoint.
- Create an inference endpoint to perform an inference task through the Elastic Inference Service (EIS).
-
-
- ``_
-
- :param task_type: The type of the inference task that the model will perform.
- NOTE: The `chat_completion` task type only supports streaming and only through
- the _stream API.
- :param eis_inference_id: The unique identifier of the inference endpoint.
- :param service: The type of service supported for the specified task type. In
- this case, `elastic`.
- :param service_settings: Settings used to install the inference model. These
- settings are specific to the `elastic` service.
- """
- if task_type in SKIP_IN_PATH:
- raise ValueError("Empty value passed for parameter 'task_type'")
- if eis_inference_id in SKIP_IN_PATH:
- raise ValueError("Empty value passed for parameter 'eis_inference_id'")
- if service is None and body is None:
- raise ValueError("Empty value passed for parameter 'service'")
- if service_settings is None and body is None:
- raise ValueError("Empty value passed for parameter 'service_settings'")
- __path_parts: t.Dict[str, str] = {
- "task_type": _quote(task_type),
- "eis_inference_id": _quote(eis_inference_id),
- }
- __path = f'/_inference/{__path_parts["task_type"]}/{__path_parts["eis_inference_id"]}'
- __query: t.Dict[str, t.Any] = {}
- __body: t.Dict[str, t.Any] = body if body is not None else {}
- if error_trace is not None:
- __query["error_trace"] = error_trace
- if filter_path is not None:
- __query["filter_path"] = filter_path
- if human is not None:
- __query["human"] = human
- if pretty is not None:
- __query["pretty"] = pretty
- if not __body:
- if service is not None:
- __body["service"] = service
- if service_settings is not None:
- __body["service_settings"] = service_settings
- if not __body:
- __body = None # type: ignore[assignment]
- __headers = {"accept": "application/json"}
- if __body is not None:
- __headers["content-type"] = "application/json"
- return await self.perform_request( # type: ignore[return-value]
- "PUT",
- __path,
- params=__query,
- headers=__headers,
- body=__body,
- endpoint_id="inference.put_eis",
- path_parts=__path_parts,
- )
-
@_rewrite_parameters(
body_fields=(
"service",
diff --git a/elasticsearch/_sync/client/inference.py b/elasticsearch/_sync/client/inference.py
index 49a639a48..50a528250 100644
--- a/elasticsearch/_sync/client/inference.py
+++ b/elasticsearch/_sync/client/inference.py
@@ -351,67 +351,6 @@ def inference(
path_parts=__path_parts,
)
- @_rewrite_parameters(
- body_name="chat_completion_request",
- )
- def post_eis_chat_completion(
- self,
- *,
- eis_inference_id: str,
- chat_completion_request: t.Optional[t.Mapping[str, t.Any]] = None,
- body: t.Optional[t.Mapping[str, t.Any]] = None,
- error_trace: t.Optional[bool] = None,
- filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
- human: t.Optional[bool] = None,
- pretty: t.Optional[bool] = None,
- ) -> ObjectApiResponse[t.Any]:
- """
- .. raw:: html
-
- Perform a chat completion task through the Elastic Inference Service (EIS).
-        Perform a chat completion inference task with the elastic service.
-
-
- ``_
-
- :param eis_inference_id: The unique identifier of the inference endpoint.
- :param chat_completion_request:
- """
- if eis_inference_id in SKIP_IN_PATH:
- raise ValueError("Empty value passed for parameter 'eis_inference_id'")
- if chat_completion_request is None and body is None:
- raise ValueError(
- "Empty value passed for parameters 'chat_completion_request' and 'body', one of them should be set."
- )
- elif chat_completion_request is not None and body is not None:
- raise ValueError("Cannot set both 'chat_completion_request' and 'body'")
- __path_parts: t.Dict[str, str] = {"eis_inference_id": _quote(eis_inference_id)}
- __path = (
- f'/_inference/chat_completion/{__path_parts["eis_inference_id"]}/_stream'
- )
- __query: t.Dict[str, t.Any] = {}
- if error_trace is not None:
- __query["error_trace"] = error_trace
- if filter_path is not None:
- __query["filter_path"] = filter_path
- if human is not None:
- __query["human"] = human
- if pretty is not None:
- __query["pretty"] = pretty
- __body = (
- chat_completion_request if chat_completion_request is not None else body
- )
- __headers = {"accept": "application/json", "content-type": "application/json"}
- return self.perform_request( # type: ignore[return-value]
- "POST",
- __path,
- params=__query,
- headers=__headers,
- body=__body,
- endpoint_id="inference.post_eis_chat_completion",
- path_parts=__path_parts,
- )
-
@_rewrite_parameters(
body_name="inference_config",
)
@@ -1088,83 +1027,6 @@ def put_cohere(
path_parts=__path_parts,
)
- @_rewrite_parameters(
- body_fields=("service", "service_settings"),
- )
- def put_eis(
- self,
- *,
- task_type: t.Union[str, t.Literal["chat_completion"]],
- eis_inference_id: str,
- service: t.Optional[t.Union[str, t.Literal["elastic"]]] = None,
- service_settings: t.Optional[t.Mapping[str, t.Any]] = None,
- error_trace: t.Optional[bool] = None,
- filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
- human: t.Optional[bool] = None,
- pretty: t.Optional[bool] = None,
- body: t.Optional[t.Dict[str, t.Any]] = None,
- ) -> ObjectApiResponse[t.Any]:
- """
- .. raw:: html
-
- Create an Elastic Inference Service (EIS) inference endpoint.
- Create an inference endpoint to perform an inference task through the Elastic Inference Service (EIS).
-
-
- ``_
-
- :param task_type: The type of the inference task that the model will perform.
- NOTE: The `chat_completion` task type only supports streaming and only through
- the _stream API.
- :param eis_inference_id: The unique identifier of the inference endpoint.
- :param service: The type of service supported for the specified task type. In
- this case, `elastic`.
- :param service_settings: Settings used to install the inference model. These
- settings are specific to the `elastic` service.
- """
- if task_type in SKIP_IN_PATH:
- raise ValueError("Empty value passed for parameter 'task_type'")
- if eis_inference_id in SKIP_IN_PATH:
- raise ValueError("Empty value passed for parameter 'eis_inference_id'")
- if service is None and body is None:
- raise ValueError("Empty value passed for parameter 'service'")
- if service_settings is None and body is None:
- raise ValueError("Empty value passed for parameter 'service_settings'")
- __path_parts: t.Dict[str, str] = {
- "task_type": _quote(task_type),
- "eis_inference_id": _quote(eis_inference_id),
- }
- __path = f'/_inference/{__path_parts["task_type"]}/{__path_parts["eis_inference_id"]}'
- __query: t.Dict[str, t.Any] = {}
- __body: t.Dict[str, t.Any] = body if body is not None else {}
- if error_trace is not None:
- __query["error_trace"] = error_trace
- if filter_path is not None:
- __query["filter_path"] = filter_path
- if human is not None:
- __query["human"] = human
- if pretty is not None:
- __query["pretty"] = pretty
- if not __body:
- if service is not None:
- __body["service"] = service
- if service_settings is not None:
- __body["service_settings"] = service_settings
- if not __body:
- __body = None # type: ignore[assignment]
- __headers = {"accept": "application/json"}
- if __body is not None:
- __headers["content-type"] = "application/json"
- return self.perform_request( # type: ignore[return-value]
- "PUT",
- __path,
- params=__query,
- headers=__headers,
- body=__body,
- endpoint_id="inference.put_eis",
- path_parts=__path_parts,
- )
-
@_rewrite_parameters(
body_fields=(
"service",