diff --git a/docs/source/openvino/models.mdx b/docs/source/openvino/models.mdx
index fe6fbeea88..cf2c54f593 100644
--- a/docs/source/openvino/models.mdx
+++ b/docs/source/openvino/models.mdx
@@ -53,6 +53,7 @@ Here is the list of the supported architectures :
 - Encoder Decoder
 - ESM
 - Exaone
+- Exaone4
 - Falcon
 - Falcon-Mamba
 - Flaubert
diff --git a/optimum/exporters/openvino/model_configs.py b/optimum/exporters/openvino/model_configs.py
index 4f5b0a55a4..96d0f07535 100644
--- a/optimum/exporters/openvino/model_configs.py
+++ b/optimum/exporters/openvino/model_configs.py
@@ -597,6 +597,21 @@ class ExaoneOpenVINOConfig(LlamaOpenVINOConfig):
     pass
 
 
+@register_in_tasks_manager(
+    "exaone4",
+    *[
+        "feature-extraction",
+        "feature-extraction-with-past",
+        "text-generation",
+        "text-generation-with-past",
+        "text-classification",
+    ],
+    library_name="transformers",
+)
+class Exaone4OpenVINOConfig(LlamaOpenVINOConfig):
+    MIN_TRANSFORMERS_VERSION = "4.54.0"
+
+
 @register_in_tasks_manager(
     "arcee",
     *[
diff --git a/tests/openvino/test_decoder.py b/tests/openvino/test_decoder.py
index be2c289628..eefdf9c955 100644
--- a/tests/openvino/test_decoder.py
+++ b/tests/openvino/test_decoder.py
@@ -122,6 +122,7 @@ class OVModelForCausalLMIntegrationTest(unittest.TestCase):
 
     if is_transformers_version(">=", "4.54.0"):
         # remote code models differs after transformers v4.54
+        SUPPORTED_ARCHITECTURES += ("exaone4",)
         SUPPORTED_ARCHITECTURES = tuple(set(SUPPORTED_ARCHITECTURES) - {"minicpm", "minicpm3", "arctic", "deepseek"})
 
     if is_transformers_version(">=", "4.55.0"):
@@ -145,6 +146,7 @@ class OVModelForCausalLMIntegrationTest(unittest.TestCase):
             "arctic",
             "chatglm4",
             "exaone",
+            "exaone4",
             "decilm",
             "minicpm3",
             "deepseek",
@@ -199,6 +201,7 @@ class OVModelForCausalLMIntegrationTest(unittest.TestCase):
         "phi3": 2,
         "gemma2": 4,
         "exaone": 8,
+        "exaone4": 1,
         "granite": 6,
         "granite-moe": 6,
         "glm": 28,
diff --git a/tests/openvino/test_export.py b/tests/openvino/test_export.py
index 98599ac31b..7767ddfd6c 100644
--- a/tests/openvino/test_export.py
+++ b/tests/openvino/test_export.py
@@ -90,6 +90,9 @@ class ExportModelTest(unittest.TestCase):
     if is_transformers_version(">=", "4.49"):
         SUPPORTED_ARCHITECTURES.update({"zamba2": OVModelForCausalLM})
 
+    if is_transformers_version(">=", "4.54"):
+        SUPPORTED_ARCHITECTURES.update({"exaone4": OVModelForCausalLM})
+
     EXPECTED_DIFFUSERS_SCALE_FACTORS = {
         "stable-diffusion-xl": {"vae_encoder": "128.0", "vae_decoder": "128.0"},
         "stable-diffusion-3": {"text_encoder_3": "8.0"},
diff --git a/tests/openvino/test_exporters_cli.py b/tests/openvino/test_exporters_cli.py
index bba9c3b92a..1c2fe9fe22 100644
--- a/tests/openvino/test_exporters_cli.py
+++ b/tests/openvino/test_exporters_cli.py
@@ -111,6 +111,15 @@ class OVCLIExportTestCase(unittest.TestCase):
                 ("text-generation-with-past", "zamba2"),
             ]
         )
+
+    if is_transformers_version(">=", "4.54"):
+        SUPPORTED_ARCHITECTURES.extend(
+            [
+                ("text-generation-with-past", "exaone4"),
+                ("feature-extraction", "exaone4"),
+                ("text-classification", "exaone4"),
+            ]
+        )
     EXPECTED_NUMBER_OF_TOKENIZER_MODELS = {
         "gpt2": 2 if is_tokenizers_version("<", "0.20") or is_openvino_version(">=", "2024.5") else 0,
         "t5": 0 if is_openvino_version("<", "2025.1") else 2,  # 2025.1 brings support for unigram tokenizers
@@ -136,6 +145,7 @@ class OVCLIExportTestCase(unittest.TestCase):
         "falcon-mamba": 2,
         "qwen3": 2,
         "zamba2": 2,
+        "exaone4": 2,
     }
 
     TOKENIZER_CHAT_TEMPLATE_TESTS_MODELS = {
diff --git a/tests/openvino/utils_tests.py b/tests/openvino/utils_tests.py
index 7801ba17bb..ade4b8ecb3 100644
--- a/tests/openvino/utils_tests.py
+++ b/tests/openvino/utils_tests.py
@@ -77,6 +77,7 @@
     "electra": "optimum-intel-internal-testing/tiny-random-electra",
     "esm": "optimum-intel-internal-testing/tiny-random-EsmModel",
     "exaone": "optimum-intel-internal-testing/tiny-random-exaone",
+    "exaone4": "optimum-intel-internal-testing/tiny-random-exaone4",
     "gemma": "optimum-intel-internal-testing/tiny-random-GemmaForCausalLM",
     "gemma2": "optimum-intel-internal-testing/tiny-random-gemma2",
     "got_ocr2": "optimum-intel-internal-testing/tiny-random-got-ocr2-hf",
@@ -338,6 +339,7 @@
         "resampler_model": 6,
     },
     "zamba2": {"model": 44},
+    "exaone4": {"model": 16},
 }
 
 TEST_IMAGE_URL = "http://images.cocodataset.org/val2017/000000039769.jpg"
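
For reference, a minimal sketch of how the newly registered `exaone4` architecture could be exercised once this change is in place, assuming `transformers >= 4.54` and a current `optimum-intel` installation; the checkpoint id below reuses the tiny internal test model registered in `utils_tests.py` above and stands in for a real Exaone4 checkpoint.

```python
from transformers import AutoTokenizer

from optimum.intel import OVModelForCausalLM

# Tiny test checkpoint from utils_tests.py; substitute a real Exaone4 model id.
model_id = "optimum-intel-internal-testing/tiny-random-exaone4"

# export=True converts the PyTorch checkpoint to OpenVINO IR on the fly,
# routed through the Exaone4OpenVINOConfig registered in model_configs.py.
model = OVModelForCausalLM.from_pretrained(model_id, export=True)
tokenizer = AutoTokenizer.from_pretrained(model_id)

inputs = tokenizer("Hello, OpenVINO!", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=8)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```

The equivalent CLI path covered by the exporter tests above would be roughly `optimum-cli export openvino --model <model_id> --task text-generation-with-past <output_dir>`.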