From fc79f623ee08e1b5ab2ffe958523c17410bf622b Mon Sep 17 00:00:00 2001
From: Samiul Monir <150824886+Samiul-TheSoccerFan@users.noreply.github.com>
Date: Thu, 23 Jan 2025 21:52:18 -0500
Subject: [PATCH] Adding flyout to create inference endpoint from Index
Management (#205184)
## Summary
### Current Behavior
When adding the `semantic_text` field, users can choose from the
preconfigured `elser` or `e5` endpoints. If they wish to use a
third-party endpoint or create a new `elser` or `e5` endpoint, they need
to use the API or the inference management UI.
### Planned Improvement
To streamline this process, we plan to integrate the functionality for
creating inference endpoints directly within the `semantic_text`
inference selection popover. This enhancement will allow users to create
inference endpoints without leaving the index management interface,
making it more convenient to add fields within the index management
mapping tab.
This PR includes:
- Adding inference endpoint from the `mapping` tab of `index
management`.
### Recording
https://github.com/user-attachments/assets/2f94cc93-9829-444d-a60f-0e721a4a751b
### Checklist
Check the PR satisfies the following conditions.
Reviewers should verify this PR satisfies this list as well.
- [X] Any text added follows [EUI's writing
guidelines](https://elastic.github.io/eui/#/guidelines/writing), uses
sentence case text and includes [i18n
support](https://github.com/elastic/kibana/blob/main/packages/kbn-i18n/README.md)
- [X] [Unit or functional
tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html)
were updated or added to match the most common scenarios
- [X] [Flaky Test
Runner](https://ci-stats.kibana.dev/trigger_flaky_test_runner/1) was
used on any tests changed
- [X] The PR description includes the appropriate Release Notes section,
and the correct `release_note:*` label is applied per the
[guidelines](https://www.elastic.co/guide/en/kibana/master/contributing.html#kibana-release-notes-process)
---------
Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
---
.../kbn-inference-endpoint-ui-common/index.ts | 3 +
.../inference_flyout_wrapper.test.tsx | 90 ++++++
.../components/inference_flyout_wrapper.tsx | 126 ++++++++
.../inference_service_form_fields.test.tsx | 107 +------
.../src/translations.ts | 15 +
.../src/types/types.ts | 5 +
.../src/utils/mock_providers.ts | 151 ++++++++++
.../translations/translations/fr-FR.json | 3 -
.../translations/translations/ja-JP.json | 3 -
.../translations/translations/zh-CN.json | 3 -
.../select_inference_id.test.tsx | 16 +-
.../public/application/app_context.tsx | 9 +-
.../mappings_editor.test.tsx | 5 +-
.../field_parameters/select_inference_id.tsx | 159 ++++------
.../fields/create_field/create_field.tsx | 46 ++-
.../semantic_text/use_semantic_text.test.ts | 273 ------------------
.../semantic_text/use_semantic_text.ts | 158 ----------
.../hooks/use_add_endpoint.test.tsx | 92 ++++++
.../application/hooks/use_add_endpoint.ts | 48 +++
.../public/application/services/api.ts | 15 +-
.../public/application/services/index.ts | 1 +
.../hooks/use_ml_model_status_toasts.ts | 30 +-
.../shared/index_management/tsconfig.json | 1 +
23 files changed, 679 insertions(+), 680 deletions(-)
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/inference_flyout_wrapper.test.tsx
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/inference_flyout_wrapper.tsx
create mode 100644 x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/utils/mock_providers.ts
delete mode 100644 x-pack/platform/plugins/shared/index_management/public/application/components/mappings_editor/components/document_fields/fields/create_field/semantic_text/use_semantic_text.test.ts
delete mode 100644 x-pack/platform/plugins/shared/index_management/public/application/components/mappings_editor/components/document_fields/fields/create_field/semantic_text/use_semantic_text.ts
create mode 100644 x-pack/platform/plugins/shared/index_management/public/application/hooks/use_add_endpoint.test.tsx
create mode 100644 x-pack/platform/plugins/shared/index_management/public/application/hooks/use_add_endpoint.ts
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/index.ts b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/index.ts
index 84ee2608c546b..b819501b35c88 100644
--- a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/index.ts
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/index.ts
@@ -6,6 +6,9 @@
*/
export { InferenceServiceFormFields } from './src/components/inference_service_form_fields';
+// default export required for React.Lazy
+// eslint-disable-next-line import/no-default-export
+export { InferenceFlyoutWrapper as default } from './src/components/inference_flyout_wrapper';
export { useProviders } from './src/hooks/use_providers';
export { SERVICE_PROVIDERS } from './src/components/providers/render_service_provider/service_provider';
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/inference_flyout_wrapper.test.tsx b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/inference_flyout_wrapper.test.tsx
new file mode 100644
index 0000000000000..8241715df02fd
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/inference_flyout_wrapper.test.tsx
@@ -0,0 +1,90 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { Form, useForm } from '@kbn/es-ui-shared-plugin/static/forms/hook_form_lib';
+import { I18nProvider } from '@kbn/i18n-react';
+import userEvent from '@testing-library/user-event';
+import { render, screen } from '@testing-library/react';
+import { act } from 'react-dom/test-utils';
+import React from 'react';
+import { httpServiceMock } from '@kbn/core-http-browser-mocks';
+import { notificationServiceMock } from '@kbn/core-notifications-browser-mocks';
+
+import { InferenceFlyoutWrapper } from './inference_flyout_wrapper';
+import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
+import { mockProviders } from '../utils/mock_providers';
+
+const mockAddEndpoint = jest.fn();
+const mockOnSubmitSuccess = jest.fn();
+const mockOnClose = jest.fn();
+const httpMock = httpServiceMock.createStartContract();
+const notificationsMock = notificationServiceMock.createStartContract();
+
+jest.mock('../hooks/use_providers', () => ({
+ useProviders: jest.fn(() => ({
+ data: mockProviders,
+ })),
+}));
+
+const MockFormProvider = ({ children }: { children: React.ReactElement }) => {
+ const { form } = useForm();
+ const queryClient = new QueryClient();
+ return (
+
+
+
+
+
+ );
+};
+
+describe('InferenceFlyout', () => {
+ beforeEach(async () => {
+ jest.clearAllMocks();
+
+ await act(async () => {
+ render(
+
+
+
+ );
+ });
+ });
+
+ it('renders', async () => {
+ expect(screen.getByTestId('inference-flyout')).toBeInTheDocument();
+ expect(screen.getByTestId('inference-flyout-header')).toBeInTheDocument();
+ expect(screen.getByTestId('inference-flyout-close-button')).toBeInTheDocument();
+ });
+
+ it('invalidates form if no provider is selected', async () => {
+ await userEvent.click(screen.getByTestId('inference-endpoint-submit-button'));
+ expect(screen.getByText('Provider is required.')).toBeInTheDocument();
+ expect(mockAddEndpoint).not.toHaveBeenCalled();
+ expect(screen.getByTestId('inference-endpoint-submit-button')).toBeDisabled();
+ });
+
+ it('submit form', async () => {
+ await userEvent.click(screen.getByTestId('provider-select'));
+ await userEvent.click(screen.getByText('Elasticsearch'));
+ await userEvent.click(screen.getByTestId('inference-endpoint-submit-button'));
+
+ expect(mockAddEndpoint).toHaveBeenCalled();
+ });
+
+ it('closes flyout', async () => {
+ await userEvent.click(screen.getByTestId('inference-flyout-close-button'));
+ expect(mockOnClose).toBeCalled();
+ });
+});
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/inference_flyout_wrapper.tsx b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/inference_flyout_wrapper.tsx
new file mode 100644
index 0000000000000..c2c195637d216
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/inference_flyout_wrapper.tsx
@@ -0,0 +1,126 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import {
+ EuiButton,
+ EuiButtonEmpty,
+ EuiFlexGroup,
+ EuiFlexItem,
+ EuiFlyout,
+ EuiFlyoutBody,
+ EuiFlyoutFooter,
+ EuiFlyoutHeader,
+ EuiSpacer,
+ EuiTitle,
+ useGeneratedHtmlId,
+} from '@elastic/eui';
+import React, { useCallback, useState } from 'react';
+
+import { HttpSetup, IToasts } from '@kbn/core/public';
+import { Form, useForm } from '@kbn/es-ui-shared-plugin/static/forms/hook_form_lib';
+import * as LABELS from '../translations';
+import { InferenceEndpoint } from '../types/types';
+import { InferenceServiceFormFields } from './inference_service_form_fields';
+
+interface InferenceFlyoutWrapperProps {
+ onFlyoutClose: (state: boolean) => void;
+ addInferenceEndpoint: (
+ inferenceEndpoint: InferenceEndpoint,
+ onSuccess: (inferenceId: string) => void,
+ onError: () => void
+  ) => Promise<void>;
+ http: HttpSetup;
+ toasts: IToasts;
+ onSubmitSuccess?: (inferenceId: string) => void;
+ isEdit?: boolean;
+}
+
+export const InferenceFlyoutWrapper: React.FC<InferenceFlyoutWrapperProps> = ({
+ onFlyoutClose,
+ addInferenceEndpoint,
+ http,
+ toasts,
+ onSubmitSuccess,
+ isEdit,
+}) => {
+ const inferenceCreationFlyoutId = useGeneratedHtmlId({
+ prefix: 'InferenceFlyoutId',
+ });
+ const closeFlyout = () => onFlyoutClose(false);
+ const [isLoading, setIsLoading] = useState(false);
+ const onSuccess = useCallback(
+ (inferenceId: string) => {
+ setIsLoading(false);
+ onSubmitSuccess?.(inferenceId);
+ },
+ [onSubmitSuccess]
+ );
+ const onError = useCallback(() => {
+ setIsLoading(false);
+ }, []);
+
+ const { form } = useForm();
+ const handleSubmit = useCallback(async () => {
+ setIsLoading(true);
+ const { isValid, data } = await form.submit();
+
+ if (isValid) {
+ addInferenceEndpoint(data as InferenceEndpoint, onSuccess, onError);
+ } else {
+ setIsLoading(false);
+ }
+ }, [addInferenceEndpoint, form, onError, onSuccess]);
+
+ return (
+
+
+
+ {LABELS.ENDPOINT_TITLE}
+
+
+
+
+
+
+
+
+
+ {LABELS.CANCEL}
+
+
+
+
+
+ );
+};
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/inference_service_form_fields.test.tsx b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/inference_service_form_fields.test.tsx
index edf80bde790d8..a9cadc5c16186 100644
--- a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/inference_service_form_fields.test.tsx
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/components/inference_service_form_fields.test.tsx
@@ -6,7 +6,6 @@
*/
import { InferenceServiceFormFields } from './inference_service_form_fields';
-import { FieldType, InferenceProvider } from '../types/types';
import React from 'react';
import { render, screen } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
@@ -14,111 +13,7 @@ import { Form, useForm } from '@kbn/es-ui-shared-plugin/static/forms/hook_form_l
import { I18nProvider } from '@kbn/i18n-react';
import { httpServiceMock } from '@kbn/core-http-browser-mocks';
import { notificationServiceMock } from '@kbn/core-notifications-browser-mocks';
-
-const mockProviders = [
- {
- service: 'hugging_face',
- name: 'Hugging Face',
- task_types: ['text_embedding', 'sparse_embedding'],
- configurations: {
- api_key: {
- default_value: null,
- description: `API Key for the provider you're connecting to.`,
- label: 'API Key',
- required: true,
- sensitive: true,
- updatable: true,
- type: FieldType.STRING,
- supported_task_types: ['text_embedding', 'sparse_embedding'],
- },
- 'rate_limit.requests_per_minute': {
- default_value: null,
- description: 'Minimize the number of rate limit errors.',
- label: 'Rate Limit',
- required: false,
- sensitive: false,
- updatable: true,
- type: FieldType.INTEGER,
- supported_task_types: ['text_embedding', 'sparse_embedding'],
- },
- url: {
- default_value: 'https://api.openai.com/v1/embeddings',
- description: 'The URL endpoint to use for the requests.',
- label: 'URL',
- required: true,
- sensitive: false,
- updatable: true,
- type: FieldType.STRING,
- supported_task_types: ['text_embedding', 'sparse_embedding'],
- },
- },
- },
- {
- service: 'cohere',
- name: 'Cohere',
- task_types: ['text_embedding', 'rerank', 'completion'],
- configurations: {
- api_key: {
- default_value: null,
- description: `API Key for the provider you're connecting to.`,
- label: 'API Key',
- required: true,
- sensitive: true,
- updatable: true,
- type: FieldType.STRING,
- supported_task_types: ['text_embedding', 'rerank', 'completion'],
- },
- 'rate_limit.requests_per_minute': {
- default_value: null,
- description: 'Minimize the number of rate limit errors.',
- label: 'Rate Limit',
- required: false,
- sensitive: false,
- updatable: true,
- type: FieldType.INTEGER,
- supported_task_types: ['text_embedding', 'completion'],
- },
- },
- },
- {
- service: 'anthropic',
- name: 'Anthropic',
- task_types: ['completion'],
- configurations: {
- api_key: {
- default_value: null,
- description: `API Key for the provider you're connecting to.`,
- label: 'API Key',
- required: true,
- sensitive: true,
- updatable: true,
- type: FieldType.STRING,
- supported_task_types: ['completion'],
- },
- 'rate_limit.requests_per_minute': {
- default_value: null,
- description:
- 'By default, the anthropic service sets the number of requests allowed per minute to 50.',
- label: 'Rate Limit',
- required: false,
- sensitive: false,
- updatable: true,
- type: FieldType.INTEGER,
- supported_task_types: ['completion'],
- },
- model_id: {
- default_value: null,
- description: 'The name of the model to use for the inference task.',
- label: 'Model ID',
- required: true,
- sensitive: false,
- updatable: true,
- type: FieldType.STRING,
- supported_task_types: ['completion'],
- },
- },
- },
-] as InferenceProvider[];
+import { mockProviders } from '../utils/mock_providers';
jest.mock('../hooks/use_providers', () => ({
useProviders: jest.fn(() => ({
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/translations.ts b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/translations.ts
index 3c9bab9ecb6fe..867625b993bc5 100644
--- a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/translations.ts
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/translations.ts
@@ -134,3 +134,18 @@ export const GET_PROVIDERS_FAILED = i18n.translate(
defaultMessage: 'Unable to find providers',
}
);
+
+export const ENDPOINT_TITLE = i18n.translate(
+ 'xpack.inferenceEndpointUICommon.components.EndpointTitle',
+ {
+ defaultMessage: 'Inference Endpoint',
+ }
+);
+
+export const CANCEL = i18n.translate('xpack.inferenceEndpointUICommon.components.cancelBtnLabel', {
+ defaultMessage: 'Cancel',
+});
+
+export const SAVE = i18n.translate('xpack.inferenceEndpointUICommon.components.saveBtnLabel', {
+ defaultMessage: 'Save',
+});
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/types/types.ts b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/types/types.ts
index 7e9533cf047dc..17f163cabdb5d 100644
--- a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/types/types.ts
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/types/types.ts
@@ -53,3 +53,8 @@ export interface Secrets {
}
export const INFERENCE_ENDPOINT_INTERNAL_API_VERSION = '1';
+
+export interface InferenceEndpoint {
+ config: Config;
+ secrets: Secrets;
+}
diff --git a/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/utils/mock_providers.ts b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/utils/mock_providers.ts
new file mode 100644
index 0000000000000..032d678c55085
--- /dev/null
+++ b/x-pack/platform/packages/shared/kbn-inference-endpoint-ui-common/src/utils/mock_providers.ts
@@ -0,0 +1,151 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { FieldType, InferenceProvider } from '../types/types';
+
+export const mockProviders: InferenceProvider[] = [
+ {
+ service: 'hugging_face',
+ name: 'Hugging Face',
+ task_types: ['text_embedding', 'sparse_embedding'],
+ configurations: {
+ api_key: {
+ default_value: null,
+ description: `API Key for the provider you're connecting to.`,
+ label: 'API Key',
+ required: true,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ supported_task_types: ['text_embedding', 'sparse_embedding'],
+ },
+ 'rate_limit.requests_per_minute': {
+ default_value: null,
+ description: 'Minimize the number of rate limit errors.',
+ label: 'Rate Limit',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ supported_task_types: ['text_embedding', 'sparse_embedding'],
+ },
+ url: {
+ default_value: 'https://api.openai.com/v1/embeddings',
+ description: 'The URL endpoint to use for the requests.',
+ label: 'URL',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ supported_task_types: ['text_embedding', 'sparse_embedding'],
+ },
+ },
+ },
+ {
+ service: 'cohere',
+ name: 'Cohere',
+ task_types: ['text_embedding', 'rerank', 'completion'],
+ configurations: {
+ api_key: {
+ default_value: null,
+ description: `API Key for the provider you're connecting to.`,
+ label: 'API Key',
+ required: true,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ supported_task_types: ['text_embedding', 'rerank', 'completion'],
+ },
+ 'rate_limit.requests_per_minute': {
+ default_value: null,
+ description: 'Minimize the number of rate limit errors.',
+ label: 'Rate Limit',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ supported_task_types: ['text_embedding', 'rerank', 'completion'],
+ },
+ },
+ },
+ {
+ service: 'anthropic',
+ name: 'Anthropic',
+ task_types: ['completion'],
+ configurations: {
+ api_key: {
+ default_value: null,
+ description: `API Key for the provider you're connecting to.`,
+ label: 'API Key',
+ required: true,
+ sensitive: true,
+ updatable: true,
+ type: FieldType.STRING,
+ supported_task_types: ['completion'],
+ },
+ 'rate_limit.requests_per_minute': {
+ default_value: null,
+ description:
+ 'By default, the anthropic service sets the number of requests allowed per minute to 50.',
+ label: 'Rate Limit',
+ required: false,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ supported_task_types: ['completion'],
+ },
+ model_id: {
+ default_value: null,
+ description: 'The name of the model to use for the inference task.',
+ label: 'Model ID',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ supported_task_types: ['completion'],
+ },
+ },
+ },
+ {
+ service: 'elasticsearch',
+ name: 'Elasticsearch',
+ task_types: ['sparse_embedding', 'text_embedding', 'rerank'],
+ configurations: {
+ num_allocations: {
+ default_value: 1,
+ description:
+ 'The total number of allocations this model is assigned across machine learning nodes.',
+ label: 'Number Allocations',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ supported_task_types: ['sparse_embedding', 'text_embedding', 'rerank'],
+ },
+ num_threads: {
+ default_value: 2,
+ description: 'Sets the number of threads used by each model allocation during inference.',
+ label: 'Number Threads',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.INTEGER,
+ supported_task_types: ['sparse_embedding', 'text_embedding', 'rerank'],
+ },
+ model_id: {
+ default_value: '.elser_model_2',
+ description: 'The name of the model to use for the inference task.',
+ label: 'Model ID',
+ required: true,
+ sensitive: false,
+ updatable: true,
+ type: FieldType.STRING,
+ supported_task_types: ['sparse_embedding', 'text_embedding', 'rerank'],
+ },
+ },
+ },
+];
diff --git a/x-pack/platform/plugins/private/translations/translations/fr-FR.json b/x-pack/platform/plugins/private/translations/translations/fr-FR.json
index ec3840dd19284..59bb6e6404e60 100644
--- a/x-pack/platform/plugins/private/translations/translations/fr-FR.json
+++ b/x-pack/platform/plugins/private/translations/translations/fr-FR.json
@@ -20773,7 +20773,6 @@
"xpack.idxMgmt.mappingsEditor.createField.addMultiFieldButtonLabel": "Ajouter un champ multiple",
"xpack.idxMgmt.mappingsEditor.createField.cancelButtonLabel": "Annuler",
"xpack.idxMgmt.mappingsEditor.createField.modelDeployedNotification": "Le modèle {modelName} a été déployé sur votre nœud de machine learning.",
- "xpack.idxMgmt.mappingsEditor.createField.modelDeploymentErrorTitle": "Échec du déploiement du modèle",
"xpack.idxMgmt.mappingsEditor.createField.modelDeploymentNotification": "Le modèle {modelName} est en cours de déploiement sur votre nœud de machine learning.",
"xpack.idxMgmt.mappingsEditor.createField.modelDeploymentStartedNotification": "Le déploiement du modèle a commencé",
"xpack.idxMgmt.mappingsEditor.customButtonLabel": "Utiliser un analyseur personnalisé",
@@ -21133,7 +21132,6 @@
"xpack.idxMgmt.mappingsEditor.parameters.dimsHelpTextDescription": "Nombre de dimensions dans le vecteur.",
"xpack.idxMgmt.mappingsEditor.parameters.geoPointNullValueHelpText": "Vous pouvez exprimer les points géographiques sous la forme d'un objet, d'une chaîne, d'un geohash, d'un tableau ou d'un POINT {docsLink}.",
"xpack.idxMgmt.mappingsEditor.parameters.inferenceId.popover.alreadyExistsLabel": "Aucun point de terminaison d'inférence sélectionné",
- "xpack.idxMgmt.mappingsEditor.parameters.inferenceId.popover.defaultLabel": "Le point de terminaison d'inférence {inferenceId} existe déjà",
"xpack.idxMgmt.mappingsEditor.parameters.inferenceId.popover.manageInferenceEndpointButton": "Gérer les points de terminaison d'inférence",
"xpack.idxMgmt.mappingsEditor.parameters.inferenceId.popover.selectable.ariaLabel": "Points de terminaison existants",
"xpack.idxMgmt.mappingsEditor.parameters.inferenceId.popover.selectable.Label": "Points de terminaison existants",
@@ -21223,7 +21221,6 @@
"xpack.idxMgmt.mappingsEditor.searchQuoteAnalyzerFieldLabel": "Analyseur de termes de recherche",
"xpack.idxMgmt.mappingsEditor.searchResult.emptyPrompt.clearSearchButtonLabel": "Effacer la recherche",
"xpack.idxMgmt.mappingsEditor.searchResult.emptyPromptTitle": "Aucun champ ne correspond à votre recherche",
- "xpack.idxMgmt.mappingsEditor.semanticText.inferenceError": "Aucun modèle d'inférence trouvé pour l'ID d'inférence {inferenceId}",
"xpack.idxMgmt.mappingsEditor.semanticTextNameFieldLabel": "Nom du nouveau champ",
"xpack.idxMgmt.mappingsEditor.setSimilarityFieldDescription": "Algorithme de notation ou similarité à utiliser.",
"xpack.idxMgmt.mappingsEditor.setSimilarityFieldTitle": "Définir la similarité",
diff --git a/x-pack/platform/plugins/private/translations/translations/ja-JP.json b/x-pack/platform/plugins/private/translations/translations/ja-JP.json
index 3531e85feb65c..f228052f48391 100644
--- a/x-pack/platform/plugins/private/translations/translations/ja-JP.json
+++ b/x-pack/platform/plugins/private/translations/translations/ja-JP.json
@@ -20633,7 +20633,6 @@
"xpack.idxMgmt.mappingsEditor.createField.addMultiFieldButtonLabel": "マルチフィールドの追加",
"xpack.idxMgmt.mappingsEditor.createField.cancelButtonLabel": "キャンセル",
"xpack.idxMgmt.mappingsEditor.createField.modelDeployedNotification": "モデル\"{modelName}\"が機械学習ノードにデプロイされました。",
- "xpack.idxMgmt.mappingsEditor.createField.modelDeploymentErrorTitle": "モデルのデプロイが失敗しました",
"xpack.idxMgmt.mappingsEditor.createField.modelDeploymentNotification": "モデル\"{modelName}\"は機械学習ノードにデプロイ中です。",
"xpack.idxMgmt.mappingsEditor.createField.modelDeploymentStartedNotification": "モデルのデプロイが開始しました",
"xpack.idxMgmt.mappingsEditor.customButtonLabel": "カスタムアナライザーの使用",
@@ -20993,7 +20992,6 @@
"xpack.idxMgmt.mappingsEditor.parameters.dimsHelpTextDescription": "ベルトルでの次元数。",
"xpack.idxMgmt.mappingsEditor.parameters.geoPointNullValueHelpText": "地点は、オブジェクト、文字列、ジオハッシュ、配列または{docsLink} POINTとして表現できます。",
"xpack.idxMgmt.mappingsEditor.parameters.inferenceId.popover.alreadyExistsLabel": "推論エンドポイントが選択されていません",
- "xpack.idxMgmt.mappingsEditor.parameters.inferenceId.popover.defaultLabel": "推論エンドポイント{inferenceId}はすでに存在します",
"xpack.idxMgmt.mappingsEditor.parameters.inferenceId.popover.manageInferenceEndpointButton": "推論エンドポイントを管理",
"xpack.idxMgmt.mappingsEditor.parameters.inferenceId.popover.selectable.ariaLabel": "既存のエンドポイント",
"xpack.idxMgmt.mappingsEditor.parameters.inferenceId.popover.selectable.Label": "既存のエンドポイント",
@@ -21083,7 +21081,6 @@
"xpack.idxMgmt.mappingsEditor.searchQuoteAnalyzerFieldLabel": "検索見積もりアナライザー",
"xpack.idxMgmt.mappingsEditor.searchResult.emptyPrompt.clearSearchButtonLabel": "検索のクリア",
"xpack.idxMgmt.mappingsEditor.searchResult.emptyPromptTitle": "検索にマッチするフィールドがありません",
- "xpack.idxMgmt.mappingsEditor.semanticText.inferenceError": "推論ID {inferenceId}の推論モデルが見つかりません",
"xpack.idxMgmt.mappingsEditor.semanticTextNameFieldLabel": "新しいフィールド名",
"xpack.idxMgmt.mappingsEditor.setSimilarityFieldDescription": "使用するためのスコアリングアルゴリズムや類似性。",
"xpack.idxMgmt.mappingsEditor.setSimilarityFieldTitle": "類似性の設定",
diff --git a/x-pack/platform/plugins/private/translations/translations/zh-CN.json b/x-pack/platform/plugins/private/translations/translations/zh-CN.json
index 43226ddc03bf0..4618a932f3e1a 100644
--- a/x-pack/platform/plugins/private/translations/translations/zh-CN.json
+++ b/x-pack/platform/plugins/private/translations/translations/zh-CN.json
@@ -20302,7 +20302,6 @@
"xpack.idxMgmt.mappingsEditor.createField.addMultiFieldButtonLabel": "添加多字段",
"xpack.idxMgmt.mappingsEditor.createField.cancelButtonLabel": "取消",
"xpack.idxMgmt.mappingsEditor.createField.modelDeployedNotification": "模型 {modelName} 已部署到您的 Machine Learning 节点。",
- "xpack.idxMgmt.mappingsEditor.createField.modelDeploymentErrorTitle": "模型部署失败",
"xpack.idxMgmt.mappingsEditor.createField.modelDeploymentNotification": "模型 {modelName} 正部署到您的 Machine Learning 节点。",
"xpack.idxMgmt.mappingsEditor.createField.modelDeploymentStartedNotification": "模型部署已开始",
"xpack.idxMgmt.mappingsEditor.customButtonLabel": "使用定制分析器",
@@ -20658,7 +20657,6 @@
"xpack.idxMgmt.mappingsEditor.parameters.dimsHelpTextDescription": "向量中的维度数。",
"xpack.idxMgmt.mappingsEditor.parameters.geoPointNullValueHelpText": "地理坐标点可表示为对象、字符串、geohash、数组或 {docsLink} POINT。",
"xpack.idxMgmt.mappingsEditor.parameters.inferenceId.popover.alreadyExistsLabel": "未选择推理终端",
- "xpack.idxMgmt.mappingsEditor.parameters.inferenceId.popover.defaultLabel": "推理终端 {inferenceId} 已存在",
"xpack.idxMgmt.mappingsEditor.parameters.inferenceId.popover.manageInferenceEndpointButton": "管理推理终端",
"xpack.idxMgmt.mappingsEditor.parameters.inferenceId.popover.selectable.ariaLabel": "现有终端",
"xpack.idxMgmt.mappingsEditor.parameters.inferenceId.popover.selectable.Label": "现有终端",
@@ -20748,7 +20746,6 @@
"xpack.idxMgmt.mappingsEditor.searchQuoteAnalyzerFieldLabel": "搜索引号分析器",
"xpack.idxMgmt.mappingsEditor.searchResult.emptyPrompt.clearSearchButtonLabel": "清除搜索",
"xpack.idxMgmt.mappingsEditor.searchResult.emptyPromptTitle": "没有字段匹配您的搜索",
- "xpack.idxMgmt.mappingsEditor.semanticText.inferenceError": "找不到推理 ID 为 {inferenceId} 的推理模型",
"xpack.idxMgmt.mappingsEditor.semanticTextNameFieldLabel": "新字段名称",
"xpack.idxMgmt.mappingsEditor.setSimilarityFieldDescription": "要使用的评分算法或相似度。",
"xpack.idxMgmt.mappingsEditor.setSimilarityFieldTitle": "设置相似度",
diff --git a/x-pack/platform/plugins/shared/index_management/__jest__/client_integration/index_details_page/select_inference_id.test.tsx b/x-pack/platform/plugins/shared/index_management/__jest__/client_integration/index_details_page/select_inference_id.test.tsx
index 81272dc1c4c8d..6861e3c6cd3ee 100644
--- a/x-pack/platform/plugins/shared/index_management/__jest__/client_integration/index_details_page/select_inference_id.test.tsx
+++ b/x-pack/platform/plugins/shared/index_management/__jest__/client_integration/index_details_page/select_inference_id.test.tsx
@@ -18,7 +18,6 @@ import {
import React from 'react';
import { InferenceAPIConfigResponse } from '@kbn/ml-trained-models-utils';
-const createInferenceEndpointMock = jest.fn();
const mockDispatch = jest.fn();
const INFERENCE_LOCATOR = 'SEARCH_INFERENCE_ENDPOINTS';
const createMockLocator = (id: string) => ({
@@ -36,6 +35,11 @@ jest.mock('../../../public/application/app_context', () => ({
},
},
},
+ services: {
+ notificationService: {
+ toasts: {},
+ },
+ },
docLinks: {
links: {
inferenceManagement: {
@@ -44,14 +48,6 @@ jest.mock('../../../public/application/app_context', () => ({
},
},
plugins: {
- ml: {
- mlApi: {
- trainedModels: {
- getTrainedModels: jest.fn().mockResolvedValue([]),
- getTrainedModelStats: jest.fn().mockResolvedValue([]),
- },
- },
- },
share: {
url: {
locators: {
@@ -120,7 +116,6 @@ describe('SelectInferenceId', () => {
beforeAll(async () => {
const defaultProps: SelectInferenceIdProps = {
'data-test-subj': 'data-inference-endpoint-list',
- createInferenceEndpoint: createInferenceEndpointMock,
};
const setup = registerTestBed(getTestForm(SelectInferenceId), {
defaultProps,
@@ -142,6 +137,7 @@ describe('SelectInferenceId', () => {
find('inferenceIdButton').simulate('click');
expect(exists('learn-how-to-create-inference-endpoints')).toBe(true);
expect(exists('manageInferenceEndpointButton')).toBe(true);
+ expect(exists('createInferenceEndpointButton')).toBe(true);
});
it('should display the inference endpoints in the combo', () => {
diff --git a/x-pack/platform/plugins/shared/index_management/public/application/app_context.tsx b/x-pack/platform/plugins/shared/index_management/public/application/app_context.tsx
index 5fc1363050026..fbadd9a5df565 100644
--- a/x-pack/platform/plugins/shared/index_management/public/application/app_context.tsx
+++ b/x-pack/platform/plugins/shared/index_management/public/application/app_context.tsx
@@ -29,6 +29,7 @@ import type { ConsolePluginStart } from '@kbn/console-plugin/public';
import { EuiBreadcrumb } from '@elastic/eui';
import { LicensingPluginStart } from '@kbn/licensing-plugin/public';
+import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
import { ExtensionsService } from '../services';
import { HttpService, NotificationService, UiMetricService } from './services';
import { IndexManagementBreadcrumb } from './services/breadcrumbs';
@@ -90,6 +91,8 @@ export interface AppDependencies {
};
}
+const queryClient = new QueryClient({});
+
export const AppContextProvider = ({
children,
value,
@@ -97,7 +100,11 @@ export const AppContextProvider = ({
value: AppDependencies;
children: React.ReactNode;
}) => {
- return {children};
+ return (
+
+ {children}
+
+ );
};
export const AppContextConsumer = AppContext.Consumer;
diff --git a/x-pack/platform/plugins/shared/index_management/public/application/components/mappings_editor/__jest__/client_integration/mappings_editor.test.tsx b/x-pack/platform/plugins/shared/index_management/public/application/components/mappings_editor/__jest__/client_integration/mappings_editor.test.tsx
index 1a99d74e6bb13..a77748379c0b9 100644
--- a/x-pack/platform/plugins/shared/index_management/public/application/components/mappings_editor/__jest__/client_integration/mappings_editor.test.tsx
+++ b/x-pack/platform/plugins/shared/index_management/public/application/components/mappings_editor/__jest__/client_integration/mappings_editor.test.tsx
@@ -29,7 +29,10 @@ describe('Mappings editor: core', () => {
let getMappingsEditorData = getMappingsEditorDataFactory(onChangeHandler);
let testBed: MappingsEditorTestBed;
const appDependencies = {
- core: { application: {} },
+ core: { application: {}, http: {} },
+ services: {
+ notificationService: { toasts: {} },
+ },
docLinks: {
links: {
inferenceManagement: {
diff --git a/x-pack/platform/plugins/shared/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/select_inference_id.tsx b/x-pack/platform/plugins/shared/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/select_inference_id.tsx
index 813cc1023c06d..e9ed4d5457090 100644
--- a/x-pack/platform/plugins/shared/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/select_inference_id.tsx
+++ b/x-pack/platform/plugins/shared/index_management/public/application/components/mappings_editor/components/document_fields/field_parameters/select_inference_id.tsx
@@ -22,28 +22,19 @@ import {
EuiTitle,
EuiIcon,
EuiLink,
+ EuiLoadingSpinner,
} from '@elastic/eui';
import { i18n } from '@kbn/i18n';
-import React, { useEffect, useState, useCallback, useMemo } from 'react';
+import React, { useState, useCallback, useMemo, lazy, Suspense } from 'react';
-import { SUPPORTED_PYTORCH_TASKS, TRAINED_MODEL_TYPE } from '@kbn/ml-trained-models-utils';
-import { InferenceTaskType } from '@elastic/elasticsearch/lib/api/types';
-import { ModelConfig } from '@kbn/inference_integration_flyout/types';
-import { InferenceFlyoutWrapper } from '@kbn/inference_integration_flyout/components/inference_flyout_wrapper';
-import { TrainedModelConfigResponse } from '@kbn/ml-plugin/common/types/trained_models';
+import { useAddEndpoint } from '../../../../../hooks/use_add_endpoint';
import { getFieldConfig } from '../../../lib';
import { useAppContext } from '../../../../../app_context';
import { useLoadInferenceEndpoints } from '../../../../../services/api';
-import { useMLModelNotificationToasts } from '../../../../../../hooks/use_ml_model_status_toasts';
-import { CustomInferenceEndpointConfig } from '../../../types';
import { UseField } from '../../../shared_imports';
+const InferenceFlyoutWrapper = lazy(() => import('@kbn/inference-endpoint-ui-common'));
export interface SelectInferenceIdProps {
- createInferenceEndpoint: (
- trainedModelId: string,
- inferenceId: string,
- modelConfig: CustomInferenceEndpointConfig
- ) => Promise;
'data-test-subj'?: string;
}
@@ -53,7 +44,6 @@ type SelectInferenceIdContentProps = SelectInferenceIdProps & {
};
export const SelectInferenceId: React.FC = ({
- createInferenceEndpoint,
'data-test-subj': dataTestSubj,
}: SelectInferenceIdProps) => {
const config = getFieldConfig('inference_id');
@@ -62,7 +52,6 @@ export const SelectInferenceId: React.FC = ({
{(field) => {
return (
= ({
};
const SelectInferenceIdContent: React.FC = ({
- createInferenceEndpoint,
'data-test-subj': dataTestSubj,
setValue,
value,
}) => {
const {
- core: { application },
+ core: { application, http },
+ services: {
+ notificationService: { toasts },
+ },
docLinks,
- plugins: { ml, share },
+ plugins: { share },
} = useAppContext();
+ const { addInferenceEndpoint } = useAddEndpoint();
const config = getFieldConfig('inference_id');
const inferenceEndpointsPageLink = share?.url.locators
@@ -91,35 +83,21 @@ const SelectInferenceIdContent: React.FC = ({
?.useUrl({});
const [isInferenceFlyoutVisible, setIsInferenceFlyoutVisible] = useState(false);
- const [availableTrainedModels, setAvailableTrainedModels] = useState<
- TrainedModelConfigResponse[]
- >([]);
const onFlyoutClose = useCallback(() => {
setIsInferenceFlyoutVisible(!isInferenceFlyoutVisible);
}, [isInferenceFlyoutVisible]);
- useEffect(() => {
- const fetchAvailableTrainedModels = async () => {
- setAvailableTrainedModels((await ml?.mlApi?.trainedModels?.getTrainedModels()) ?? []);
- };
- fetchAvailableTrainedModels();
- }, [ml]);
- const trainedModels = useMemo(() => {
- const availableTrainedModelsList = availableTrainedModels
- .filter(
- (model: TrainedModelConfigResponse) =>
- model.model_type === TRAINED_MODEL_TYPE.PYTORCH &&
- (model?.inference_config
- ? Object.keys(model.inference_config).includes(SUPPORTED_PYTORCH_TASKS.TEXT_EMBEDDING)
- : {})
- )
- .map((model: TrainedModelConfigResponse) => model.model_id);
+ const { isLoading, data: endpoints, resendRequest } = useLoadInferenceEndpoints();
- return availableTrainedModelsList;
- }, [availableTrainedModels]);
- const [isSaveInferenceLoading, setIsSaveInferenceLoading] = useState(false);
+ const onSubmitSuccess = useCallback(
+ (newEndpointId: string) => {
+ resendRequest();
+ setValue(newEndpointId);
- const { isLoading, data: endpoints, resendRequest } = useLoadInferenceEndpoints();
+ setIsInferenceFlyoutVisible(!isInferenceFlyoutVisible);
+ },
+ [isInferenceFlyoutVisible, resendRequest, setValue]
+ );
const options: EuiSelectableOption[] = useMemo(() => {
const filteredEndpoints = endpoints?.filter(
@@ -152,52 +130,7 @@ const SelectInferenceIdContent: React.FC = ({
return newOptions;
}, [endpoints, value]);
- const { showErrorToasts } = useMLModelNotificationToasts();
-
- const onSaveInferenceCallback = useCallback(
- async (inferenceId: string, taskType: InferenceTaskType, modelConfig: ModelConfig) => {
- try {
- const trainedModelId = modelConfig.service_settings.model_id || '';
- const customModelConfig = {
- taskType,
- modelConfig,
- };
- setIsSaveInferenceLoading(true);
- await createInferenceEndpoint(trainedModelId, inferenceId, customModelConfig);
- resendRequest();
- setValue(inferenceId);
- setIsInferenceFlyoutVisible(!isInferenceFlyoutVisible);
- setIsSaveInferenceLoading(false);
- } catch (error) {
- showErrorToasts(error);
- setIsSaveInferenceLoading(false);
- }
- },
- [createInferenceEndpoint, setValue, isInferenceFlyoutVisible, showErrorToasts, resendRequest]
- );
const [isInferencePopoverVisible, setIsInferencePopoverVisible] = useState(false);
- const [inferenceEndpointError, setInferenceEndpointError] = useState(
- undefined
- );
- const onInferenceEndpointChange = useCallback(
- async (inferenceId: string) => {
- const modelsExist = options.some((i) => i.label === inferenceId);
- if (modelsExist) {
- setInferenceEndpointError(
- i18n.translate(
- 'xpack.idxMgmt.mappingsEditor.parameters.inferenceId.popover.defaultLabel',
- {
- defaultMessage: 'Inference endpoint {inferenceId} already exists',
- values: { inferenceId },
- }
- )
- );
- } else {
- setInferenceEndpointError(undefined);
- }
- },
- [options]
- );
const selectedOptionLabel = options.find((option) => option.checked)?.label;
@@ -234,8 +167,26 @@ const SelectInferenceIdContent: React.FC = ({
panelPaddingSize="m"
closePopover={() => setIsInferencePopoverVisible(!isInferencePopoverVisible)}
>
- {inferenceEndpointsPageLink && (
-
+
+ {
+ e.preventDefault();
+ setIsInferenceFlyoutVisible(true);
+ setIsInferencePopoverVisible(!isInferencePopoverVisible);
+ }}
+ >
+ {i18n.translate(
+ 'xpack.idxMgmt.mappingsEditor.parameters.inferenceId.popover.createInferenceEndpointButton',
+ {
+ defaultMessage: 'Add inference endpoint',
+ }
+ )}
+
+ {inferenceEndpointsPageLink && (
= ({
}
)}
-
- )}
+ )}
+
@@ -327,22 +278,18 @@ const SelectInferenceIdContent: React.FC = ({
{inferencePopover()}
- {isInferenceFlyoutVisible && (
-
- )}
+ {isInferenceFlyoutVisible ? (
+ }>
+
+
+ ) : null}
;
@@ -77,8 +76,9 @@ export const CreateField = React.memo(function CreateFieldComponent({
semanticTextInfo,
createFieldFormRef,
}: Props) {
- const { isSemanticTextEnabled, ml, setErrorsInTrainedModelDeployment } = semanticTextInfo ?? {};
+ const { isSemanticTextEnabled } = semanticTextInfo ?? {};
const dispatch = useDispatch();
+ const { fields, mappingViewFields } = useMappingsState();
const fieldTypeInputRef = useRef(null);
const { form } = useForm({
@@ -106,18 +106,40 @@ export const CreateField = React.memo(function CreateFieldComponent({
}
};
- const { createInferenceEndpoint } = useSemanticText({
- form,
- setErrorsInTrainedModelDeployment,
- ml,
- });
-
const isSemanticText = form.getFormData().type === 'semantic_text';
useEffect(() => {
if (createFieldFormRef?.current) createFieldFormRef?.current.focus();
}, [createFieldFormRef]);
+ useEffect(() => {
+ if (isSemanticText) {
+ const allSemanticFields = {
+ byId: {
+ ...fields.byId,
+ ...mappingViewFields.byId,
+ },
+ rootLevelFields: [],
+ aliases: {},
+ maxNestedDepth: 0,
+ };
+ const defaultName = getFieldByPathName(allSemanticFields, 'semantic_text')
+ ? ''
+ : 'semantic_text';
+ const referenceField =
+ Object.values(allSemanticFields.byId)
+ .find((field) => field.source.type === 'text' && !field.isMultiField)
+ ?.path.join('.') || '';
+ if (!form.getFormData().name) {
+ form.setFieldValue('name', defaultName);
+ }
+ if (!form.getFormData().reference_field) {
+ form.setFieldValue('reference_field', referenceField);
+ }
+ }
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ }, [isSemanticText]);
+
const submitForm = async (
e?: React.FormEvent,
exitAfter: boolean = false,
@@ -287,9 +309,7 @@ export const CreateField = React.memo(function CreateFieldComponent({
{renderRequiredParametersForm()}
- {isSemanticText && (
-
- )}
+ {isSemanticText && }
{renderFormActions()}
diff --git a/x-pack/platform/plugins/shared/index_management/public/application/components/mappings_editor/components/document_fields/fields/create_field/semantic_text/use_semantic_text.test.ts b/x-pack/platform/plugins/shared/index_management/public/application/components/mappings_editor/components/document_fields/fields/create_field/semantic_text/use_semantic_text.test.ts
deleted file mode 100644
index e0ce2db2446ee..0000000000000
--- a/x-pack/platform/plugins/shared/index_management/public/application/components/mappings_editor/components/document_fields/fields/create_field/semantic_text/use_semantic_text.test.ts
+++ /dev/null
@@ -1,273 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-import { renderHook } from '@testing-library/react';
-import { CustomInferenceEndpointConfig, SemanticTextField } from '../../../../../types';
-import { useSemanticText } from './use_semantic_text';
-import { act } from 'react-dom/test-utils';
-
-jest.mock('../../../../../../../../hooks/use_details_page_mappings_model_management', () => ({
- useDetailsPageMappingsModelManagement: () => ({
- fetchInferenceToModelIdMap: () => ({
- '.preconfigured_elser': {
- isDeployed: false,
- isDeployable: true,
- trainedModelId: '.elser_model_2',
- },
- '.preconfigured_e5': {
- isDeployed: false,
- isDeployable: true,
- trainedModelId: '.multilingual-e5-small',
- },
- openai: {
- isDeployed: false,
- isDeployable: false,
- trainedModelId: '',
- },
- my_elser_endpoint: {
- isDeployed: false,
- isDeployable: true,
- trainedModelId: '.elser_model_2',
- },
- }),
- }),
-}));
-
-const mlMock: any = {
- mlApi: {
- inferenceModels: {
- createInferenceEndpoint: jest.fn().mockResolvedValue({}),
- },
- },
-};
-
-const mockField: Record = {
- elser_model_2: {
- name: 'name',
- type: 'semantic_text',
- inference_id: '.preconfigured_elser',
- reference_field: 'title',
- },
- e5: {
- name: 'name',
- type: 'semantic_text',
- inference_id: '.preconfigured_e5',
- reference_field: 'title',
- },
- openai: {
- name: 'name',
- type: 'semantic_text',
- inference_id: 'openai',
- reference_field: 'title',
- },
- my_elser_endpoint: {
- name: 'name',
- type: 'semantic_text',
- inference_id: 'my_elser_endpoint',
- reference_field: 'title',
- },
-};
-
-const mockConfig: Record = {
- openai: {
- taskType: 'text_embedding',
- modelConfig: {
- service: 'openai',
- service_settings: {
- api_key: 'test',
- model_id: 'text-embedding-ada-002',
- },
- },
- },
- elser: {
- taskType: 'sparse_embedding',
- modelConfig: {
- service: 'elser',
- service_settings: {
- num_allocations: 1,
- num_threads: 1,
- },
- },
- },
-};
-
-const mockDispatch = jest.fn();
-
-jest.mock('../../../../../mappings_state_context', () => ({
- useMappingsState: jest.fn().mockReturnValue({
- inferenceToModelIdMap: {
- '.preconfigured_elser': {
- isDeployed: false,
- isDeployable: true,
- trainedModelId: '.elser_model_2',
- },
- '.preconfigured_e5': {
- isDeployed: false,
- isDeployable: true,
- trainedModelId: '.multilingual-e5-small',
- },
- openai: {
- isDeployed: false,
- isDeployable: false,
- trainedModelId: '',
- },
- my_elser_endpoint: {
- isDeployed: false,
- isDeployable: true,
- trainedModelId: '.elser_model_2',
- },
- },
- fields: {
- byId: {},
- },
- mappingViewFields: { byId: {} },
- }),
- useDispatch: () => mockDispatch,
-}));
-
-jest.mock('../../../../../../component_templates/component_templates_context', () => ({
- useComponentTemplatesContext: jest.fn().mockReturnValue({
- toasts: {
- addError: jest.fn(),
- addSuccess: jest.fn(),
- },
- }),
-}));
-
-jest.mock('../../../../../../../services/api', () => ({
- getInferenceEndpoints: jest.fn().mockResolvedValue({
- data: [
- {
- inference_id: '.preconfigured_e5',
- task_type: 'text_embedding',
- service: 'elasticsearch',
- service_settings: {
- num_allocations: 1,
- num_threads: 1,
- model_id: '.multilingual-e5-small',
- },
- task_settings: {},
- },
- ],
- }),
-}));
-
-describe('useSemanticText', () => {
- let mockForm: any;
-
- beforeEach(() => {
- jest.clearAllMocks();
- mockForm = {
- form: {
- getFormData: jest.fn().mockReturnValue({
- referenceField: 'title',
- name: 'sem',
- type: 'semantic_text',
- inferenceId: 'e5',
- }),
- setFieldValue: jest.fn(),
- },
- thirdPartyModel: {
- getFormData: jest.fn().mockReturnValue({
- referenceField: 'title',
- name: 'semantic_text_openai_endpoint',
- type: 'semantic_text',
- inferenceId: 'openai',
- }),
- setFieldValue: jest.fn(),
- },
- elasticModelEndpointCreatedfromFlyout: {
- getFormData: jest.fn().mockReturnValue({
- referenceField: 'title',
- name: 'semantic_text_elserServiceType_endpoint',
- type: 'semantic_text',
- inferenceId: 'my_elser_endpoint',
- }),
- setFieldValue: jest.fn(),
- },
- };
- });
- it('should handle semantic text with third party model correctly', async () => {
- const { result } = renderHook(() =>
- useSemanticText({
- form: mockForm.thirdPartyModel,
- setErrorsInTrainedModelDeployment: jest.fn(),
- ml: mlMock,
- })
- );
- await act(async () => {
- result.current.handleSemanticText(mockField.openai, mockConfig.openai);
- });
- expect(mockDispatch).toHaveBeenCalledWith({
- type: 'field.add',
- value: mockField.openai,
- });
- expect(mlMock.mlApi.inferenceModels.createInferenceEndpoint).toHaveBeenCalledWith(
- 'openai',
- 'text_embedding',
- mockConfig.openai.modelConfig
- );
- });
-
- it('should handle semantic text correctly', async () => {
- const { result } = renderHook(() =>
- useSemanticText({
- form: mockForm.form,
- setErrorsInTrainedModelDeployment: jest.fn(),
- ml: mlMock,
- })
- );
-
- await act(async () => {
- result.current.handleSemanticText(mockField.elser_model_2);
- });
-
- expect(mockDispatch).toHaveBeenCalledWith({
- type: 'field.add',
- value: mockField.elser_model_2,
- });
- });
- it('does not call create inference endpoint api, if default endpoint already exists', async () => {
- const { result } = renderHook(() =>
- useSemanticText({
- form: mockForm.form,
- setErrorsInTrainedModelDeployment: jest.fn(),
- ml: mlMock,
- })
- );
-
- await act(async () => {
- result.current.handleSemanticText(mockField.e5);
- });
-
- expect(mockDispatch).toHaveBeenCalledWith({
- type: 'field.add',
- value: mockField.e5,
- });
-
- expect(mlMock.mlApi.inferenceModels.createInferenceEndpoint).not.toBeCalled();
- });
-
- it('handles errors correctly', async () => {
- const mockError = new Error('Test error');
- mlMock.mlApi?.inferenceModels.createInferenceEndpoint.mockImplementationOnce(() => {
- throw mockError;
- });
-
- const setErrorsInTrainedModelDeployment = jest.fn();
-
- const { result } = renderHook(() =>
- useSemanticText({ form: mockForm.form, setErrorsInTrainedModelDeployment, ml: mlMock })
- );
-
- await act(async () => {
- result.current.handleSemanticText(mockField.elser_model_2);
- });
-
- expect(setErrorsInTrainedModelDeployment).toHaveBeenCalledWith(expect.any(Function));
- });
-});
diff --git a/x-pack/platform/plugins/shared/index_management/public/application/components/mappings_editor/components/document_fields/fields/create_field/semantic_text/use_semantic_text.ts b/x-pack/platform/plugins/shared/index_management/public/application/components/mappings_editor/components/document_fields/fields/create_field/semantic_text/use_semantic_text.ts
deleted file mode 100644
index a7b380fd120cd..0000000000000
--- a/x-pack/platform/plugins/shared/index_management/public/application/components/mappings_editor/components/document_fields/fields/create_field/semantic_text/use_semantic_text.ts
+++ /dev/null
@@ -1,158 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-import { useCallback } from 'react';
-import { MlPluginStart } from '@kbn/ml-plugin/public';
-import React, { useEffect } from 'react';
-import { InferenceTaskType } from '@elastic/elasticsearch/lib/api/types';
-import { ElserModels } from '@kbn/ml-trained-models-utils';
-import { i18n } from '@kbn/i18n';
-import { useDetailsPageMappingsModelManagement } from '../../../../../../../../hooks/use_details_page_mappings_model_management';
-import { useDispatch, useMappingsState } from '../../../../../mappings_state_context';
-import { FormHook } from '../../../../../shared_imports';
-import { CustomInferenceEndpointConfig, Field, SemanticTextField } from '../../../../../types';
-import { useMLModelNotificationToasts } from '../../../../../../../../hooks/use_ml_model_status_toasts';
-
-import { getInferenceEndpoints } from '../../../../../../../services/api';
-import { getFieldByPathName } from '../../../../../lib/utils';
-
-interface UseSemanticTextProps {
- form: FormHook;
- ml?: MlPluginStart;
- setErrorsInTrainedModelDeployment?: React.Dispatch<
- React.SetStateAction>
- >;
-}
-interface DefaultInferenceEndpointConfig {
- taskType: InferenceTaskType;
- service: string;
-}
-
-export function useSemanticText(props: UseSemanticTextProps) {
- const { form, setErrorsInTrainedModelDeployment, ml } = props;
- const { fields, mappingViewFields } = useMappingsState();
- const { fetchInferenceToModelIdMap } = useDetailsPageMappingsModelManagement();
- const dispatch = useDispatch();
- const { showSuccessToasts, showErrorToasts } = useMLModelNotificationToasts();
-
- const fieldTypeValue = form.getFormData()?.type;
- useEffect(() => {
- if (fieldTypeValue === 'semantic_text') {
- const allFields = {
- byId: {
- ...fields.byId,
- ...mappingViewFields.byId,
- },
- rootLevelFields: [],
- aliases: {},
- maxNestedDepth: 0,
- };
- const defaultName = getFieldByPathName(allFields, 'semantic_text') ? '' : 'semantic_text';
- const referenceField =
- Object.values(allFields.byId)
- .find((field) => field.source.type === 'text' && !field.isMultiField)
- ?.path.join('.') || '';
- if (!form.getFormData().name) {
- form.setFieldValue('name', defaultName);
- }
- if (!form.getFormData().reference_field) {
- form.setFieldValue('reference_field', referenceField);
- }
- }
- // eslint-disable-next-line react-hooks/exhaustive-deps
- }, [fieldTypeValue]);
-
- const createInferenceEndpoint = useCallback(
- async (
- trainedModelId: string,
- inferenceId: string,
- customInferenceEndpointConfig?: CustomInferenceEndpointConfig
- ) => {
- const isElser = ElserModels.includes(trainedModelId);
- const defaultInferenceEndpointConfig: DefaultInferenceEndpointConfig = {
- service: isElser ? 'elser' : 'elasticsearch',
- taskType: isElser ? 'sparse_embedding' : 'text_embedding',
- };
-
- const modelConfig = customInferenceEndpointConfig
- ? customInferenceEndpointConfig.modelConfig
- : {
- service: defaultInferenceEndpointConfig.service,
- service_settings: {
- adaptive_allocations: { enabled: true },
- num_threads: 1,
- model_id: trainedModelId,
- },
- };
- const taskType: InferenceTaskType =
- customInferenceEndpointConfig?.taskType ?? defaultInferenceEndpointConfig.taskType;
-
- await ml?.mlApi?.inferenceModels?.createInferenceEndpoint(inferenceId, taskType, modelConfig);
- },
- [ml?.mlApi?.inferenceModels]
- );
-
- const handleSemanticText = async (
- data: SemanticTextField,
- customInferenceEndpointConfig?: CustomInferenceEndpointConfig
- ) => {
- const modelIdMap = await fetchInferenceToModelIdMap();
- const inferenceId = data.inference_id;
- const inferenceData = modelIdMap?.[inferenceId];
- if (!inferenceData) {
- throw new Error(
- i18n.translate('xpack.idxMgmt.mappingsEditor.semanticText.inferenceError', {
- defaultMessage: 'No inference model found for inference ID {inferenceId}',
- values: { inferenceId },
- })
- );
- }
-
- const { trainedModelId } = inferenceData;
- dispatch({ type: 'field.add', value: data });
- const inferenceEndpoints = await getInferenceEndpoints();
- const hasInferenceEndpoint = inferenceEndpoints.data?.some(
- (inference) => inference.inference_id === inferenceId
- );
- // if inference endpoint exists already, do not create new inference endpoint
- if (hasInferenceEndpoint) {
- return;
- }
- try {
- // Only show toast if it's an internal Elastic model that hasn't been deployed yet
- await createInferenceEndpoint(
- trainedModelId,
- data.inference_id,
- customInferenceEndpointConfig
- );
- if (trainedModelId) {
- if (inferenceData.isDeployable && !inferenceData.isDeployed) {
- showSuccessToasts(trainedModelId);
- }
- // clear error because we've succeeded here
- setErrorsInTrainedModelDeployment?.((prevItems) => ({
- ...prevItems,
- [data.inference_id]: undefined,
- }));
- }
- } catch (error) {
- // trainedModelId is empty string when it's a third party model
- if (trainedModelId) {
- setErrorsInTrainedModelDeployment?.((prevItems) => ({
- ...prevItems,
- [data.inference_id]: error,
- }));
- }
- showErrorToasts(error);
- }
- };
-
- return {
- createInferenceEndpoint,
- handleSemanticText,
- };
-}
diff --git a/x-pack/platform/plugins/shared/index_management/public/application/hooks/use_add_endpoint.test.tsx b/x-pack/platform/plugins/shared/index_management/public/application/hooks/use_add_endpoint.test.tsx
new file mode 100644
index 0000000000000..f5f425173c2d1
--- /dev/null
+++ b/x-pack/platform/plugins/shared/index_management/public/application/hooks/use_add_endpoint.test.tsx
@@ -0,0 +1,92 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { act, renderHook } from '@testing-library/react';
+import { useAddEndpoint } from './use_add_endpoint';
+import { useMLModelNotificationToasts } from '../../hooks/use_ml_model_status_toasts';
+import { createInferenceEndpoint } from '../services';
+
+jest.mock('../services', () => ({
+ createInferenceEndpoint: jest.fn(),
+}));
+
+jest.mock('../../hooks/use_ml_model_status_toasts', () => ({
+ useMLModelNotificationToasts: jest.fn(),
+}));
+
+const mockOnSuccess = jest.fn();
+const mockOnError = jest.fn();
+const mockShowErrorToast = jest.fn();
+const mockShowSuccessToast = jest.fn();
+
+// Unit tests for the useAddEndpoint hook: verifies that endpoint creation
+// success/failure is routed to the right toast and the right caller callback.
+describe('useAddEndpoint', () => {
+  // Minimal endpoint config; typed `any` because the real InferenceEndpoint
+  // type lives in @kbn/inference-endpoint-ui-common and is not needed here.
+  const mockConfig: any = {
+    provider: 'elasticsearch',
+    taskType: 'text_embedding',
+    inferenceId: 'es-endpoint-1',
+    providerConfig: {
+      num_allocations: 1,
+      num_threads: 2,
+      model_id: '.multilingual-e5-small',
+    },
+  };
+  const mockSecrets: any = { providerSecrets: {} };
+
+  const mockInferenceEndpoint = {
+    config: mockConfig,
+    secrets: mockSecrets,
+  };
+
+  beforeEach(() => {
+    jest.clearAllMocks();
+    // Replace the toast hook so we can assert which toast fired.
+    (useMLModelNotificationToasts as jest.Mock).mockReturnValue({
+      showInferenceCreationErrorToasts: mockShowErrorToast,
+      showInferenceSuccessToast: mockShowSuccessToast,
+    });
+  });
+
+  it('calls onSuccess and shows success toast on successful endpoint creation', async () => {
+    // sendRequest-style response: `error: null` signals success.
+    (createInferenceEndpoint as jest.Mock).mockResolvedValueOnce({ error: null });
+
+    const { result } = renderHook(() => useAddEndpoint());
+
+    await act(async () => {
+      await result.current.addInferenceEndpoint(mockInferenceEndpoint, mockOnSuccess, mockOnError);
+    });
+
+    expect(createInferenceEndpoint).toHaveBeenCalledWith(
+      'text_embedding',
+      'es-endpoint-1',
+      mockInferenceEndpoint
+    );
+    expect(mockShowSuccessToast).toHaveBeenCalledTimes(1);
+    expect(mockShowErrorToast).not.toHaveBeenCalled();
+    expect(mockOnSuccess).toHaveBeenCalledTimes(1);
+    expect(mockOnError).not.toHaveBeenCalled();
+  });
+
+  it('calls onError and shows error toast on endpoint creation failure', async () => {
+    const mockError = new Error('Endpoint creation failed');
+    // A resolved promise carrying an `error` object is the failure path
+    // (the hook inspects the payload; it does not rely on a rejection).
+    (createInferenceEndpoint as jest.Mock).mockResolvedValueOnce({ error: mockError });
+
+    const { result } = renderHook(() => useAddEndpoint());
+
+    await act(async () => {
+      await result.current.addInferenceEndpoint(mockInferenceEndpoint, mockOnSuccess, mockOnError);
+    });
+
+    expect(createInferenceEndpoint).toHaveBeenCalledWith(
+      'text_embedding',
+      'es-endpoint-1',
+      mockInferenceEndpoint
+    );
+    // The hook forwards error.message (not the Error object) to the toast.
+    expect(mockShowErrorToast).toHaveBeenCalledWith('Endpoint creation failed');
+    expect(mockShowSuccessToast).not.toHaveBeenCalled();
+    expect(mockOnError).toHaveBeenCalledTimes(1);
+    expect(mockOnSuccess).not.toHaveBeenCalled();
+  });
+});
diff --git a/x-pack/platform/plugins/shared/index_management/public/application/hooks/use_add_endpoint.ts b/x-pack/platform/plugins/shared/index_management/public/application/hooks/use_add_endpoint.ts
new file mode 100644
index 0000000000000..f5b2f98610775
--- /dev/null
+++ b/x-pack/platform/plugins/shared/index_management/public/application/hooks/use_add_endpoint.ts
@@ -0,0 +1,48 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { useCallback } from 'react';
+
+import { InferenceEndpoint } from '@kbn/inference-endpoint-ui-common';
+import { useMLModelNotificationToasts } from '../../hooks/use_ml_model_status_toasts';
+import { createInferenceEndpoint } from '../services';
+
+/**
+ * Hook exposing `addInferenceEndpoint`, which creates an inference endpoint
+ * via the internal inference API and surfaces the outcome as a toast.
+ *
+ * On success the success toast is shown and `onSuccess(inferenceId)` is
+ * invoked (used by the caller to select the new endpoint and refresh lists);
+ * on failure the error toast is shown and `onError()` is invoked.
+ */
+export const useAddEndpoint = () => {
+  const { showInferenceCreationErrorToasts, showInferenceSuccessToast } =
+    useMLModelNotificationToasts();
+
+  const addInferenceEndpoint = useCallback(
+    async (
+      inferenceEndpoint: InferenceEndpoint,
+      onSuccess?: (inferenceId: string) => void,
+      onError?: () => void
+    ) => {
+      // createInferenceEndpoint wraps sendRequest: failures come back as an
+      // `error` field on the resolved value rather than a thrown rejection.
+      const { error } = await createInferenceEndpoint(
+        inferenceEndpoint.config.taskType,
+        inferenceEndpoint.config.inferenceId,
+        inferenceEndpoint
+      );
+
+      if (error) {
+        // Only the message is forwarded; the toast helper builds the Error.
+        showInferenceCreationErrorToasts(error?.message);
+        if (onError) {
+          onError();
+        }
+      } else {
+        showInferenceSuccessToast();
+        if (onSuccess) {
+          onSuccess(inferenceEndpoint.config.inferenceId);
+        }
+      }
+    },
+    [showInferenceCreationErrorToasts, showInferenceSuccessToast]
+  );
+
+  return {
+    addInferenceEndpoint,
+  };
+};
diff --git a/x-pack/platform/plugins/shared/index_management/public/application/services/api.ts b/x-pack/platform/plugins/shared/index_management/public/application/services/api.ts
index 8fb3524a65a0f..5939163a4fb1f 100644
--- a/x-pack/platform/plugins/shared/index_management/public/application/services/api.ts
+++ b/x-pack/platform/plugins/shared/index_management/public/application/services/api.ts
@@ -9,7 +9,8 @@ import { METRIC_TYPE } from '@kbn/analytics';
import type { SerializedEnrichPolicy } from '@kbn/index-management-shared-types';
import { IndicesStatsResponse } from '@elastic/elasticsearch/lib/api/types';
import { InferenceAPIConfigResponse } from '@kbn/ml-trained-models-utils';
-import { MappingTypeMapping } from '@elastic/elasticsearch/lib/api/types';
+import { MappingTypeMapping } from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
+import { InferenceEndpoint } from '@kbn/inference-endpoint-ui-common';
import {
API_BASE_PATH,
INTERNAL_API_BASE_PATH,
@@ -460,6 +461,18 @@ export function getInferenceEndpoints() {
});
}
+export function createInferenceEndpoint(
+ taskType: string,
+ inferenceId: string,
+ inferenceEndpoint: InferenceEndpoint
+) {
+ return sendRequest({
+ path: `/internal/inference_endpoint/endpoints/${taskType}/${inferenceId}`,
+ method: 'put',
+ body: JSON.stringify(inferenceEndpoint),
+ });
+}
+
export function useLoadInferenceEndpoints() {
return useRequest({
path: `${API_BASE_PATH}/inference/all`,
diff --git a/x-pack/platform/plugins/shared/index_management/public/application/services/index.ts b/x-pack/platform/plugins/shared/index_management/public/application/services/index.ts
index 09d9065b2b729..5ad0cadede763 100644
--- a/x-pack/platform/plugins/shared/index_management/public/application/services/index.ts
+++ b/x-pack/platform/plugins/shared/index_management/public/application/services/index.ts
@@ -28,6 +28,7 @@ export {
useLoadIndexSettings,
createIndex,
useLoadInferenceEndpoints,
+ createInferenceEndpoint,
} from './api';
export { sortTable } from './sort_table';
diff --git a/x-pack/platform/plugins/shared/index_management/public/hooks/use_ml_model_status_toasts.ts b/x-pack/platform/plugins/shared/index_management/public/hooks/use_ml_model_status_toasts.ts
index 7b553f37498d5..2b9efa7a32f30 100644
--- a/x-pack/platform/plugins/shared/index_management/public/hooks/use_ml_model_status_toasts.ts
+++ b/x-pack/platform/plugins/shared/index_management/public/hooks/use_ml_model_status_toasts.ts
@@ -46,10 +46,36 @@ export function useMLModelNotificationToasts() {
const showErrorToasts = (error: ErrorType) => {
const errorObj = extractErrorProperties(error);
return toasts.addError(new MLRequestFailure(errorObj, error), {
- title: i18n.translate('xpack.idxMgmt.mappingsEditor.createField.modelDeploymentErrorTitle', {
+ title: i18n.translate('xpack.idxMgmt.mappingsEditor.createField.inferenceErrorTitle', {
defaultMessage: 'Model deployment failed',
}),
});
};
- return { showSuccessToasts, showErrorToasts, showSuccessfullyDeployedToast };
+
+ const showInferenceCreationErrorToasts = (error: ErrorType) => {
+ const errorObj = extractErrorProperties(error);
+ return toasts.addError(new MLRequestFailure(errorObj, error), {
+ title: i18n.translate(
+ 'xpack.idxMgmt.mappingsEditor.createField.inferenceCreationErrorTitle',
+ {
+ defaultMessage: 'Endpoint creation failed',
+ }
+ ),
+ });
+ };
+
+ const showInferenceSuccessToast = () => {
+ return toasts.addSuccess({
+ title: i18n.translate('xpack.idxMgmt.mappingsEditor.createField.endpointAddedSuccess', {
+ defaultMessage: 'Inference endpoint added',
+ }),
+ });
+ };
+ return {
+ showSuccessToasts,
+ showErrorToasts,
+ showSuccessfullyDeployedToast,
+ showInferenceCreationErrorToasts,
+ showInferenceSuccessToast,
+ };
}
diff --git a/x-pack/platform/plugins/shared/index_management/tsconfig.json b/x-pack/platform/plugins/shared/index_management/tsconfig.json
index 41514049a13a8..d88eae7dbad66 100644
--- a/x-pack/platform/plugins/shared/index_management/tsconfig.json
+++ b/x-pack/platform/plugins/shared/index_management/tsconfig.json
@@ -55,6 +55,7 @@
"@kbn/unsaved-changes-prompt",
"@kbn/shared-ux-table-persist",
"@kbn/core-application-browser",
+ "@kbn/inference-endpoint-ui-common",
],
"exclude": ["target/**/*"]
}