From ff96499d5c634d6c9e204eac5d7fdef9b2d47bfd Mon Sep 17 00:00:00 2001
From: Haoran
Date: Thu, 13 Mar 2025 00:38:13 +0400
Subject: [PATCH] fix: accept OpenAI-compatible local LLM

---
 .../llms/openai/pandasai_openai/openai.py | 17 ++++++++++++++++-
 1 file changed, 16 insertions(+), 1 deletion(-)

diff --git a/extensions/llms/openai/pandasai_openai/openai.py b/extensions/llms/openai/pandasai_openai/openai.py
index 93e9c301a..31a7d972c 100644
--- a/extensions/llms/openai/pandasai_openai/openai.py
+++ b/extensions/llms/openai/pandasai_openai/openai.py
@@ -2,6 +2,7 @@
 from typing import Any, Dict, Optional
 
 import openai
+import requests
 
 from pandasai.exceptions import APIKeyNotFoundError, UnsupportedModelError
 from pandasai.helpers import load_dotenv
@@ -81,7 +82,21 @@ def __init__(
             self._is_chat_model = False
             self.client = openai.OpenAI(**self._client_params).completions
         else:
-            raise UnsupportedModelError(self.model)
+            self._is_chat_model = kwargs.get("is_chat_model", True)
+            model_names = [
+                model.get("id")
+                for model in requests.get(f"{self.api_base}/models")
+                .json()
+                .get("data", [])
+            ]
+            if self.model in model_names:
+                self.client = (
+                    openai.OpenAI(**self._client_params).chat.completions
+                    if self._is_chat_model
+                    else openai.OpenAI(**self._client_params).completions
+                )
+            else:
+                raise UnsupportedModelError(self.model)
 
     @property
     def _default_params(self) -> Dict[str, Any]:
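
Usage note (not part of the patch): a minimal sketch of how the extension could be pointed at an OpenAI-compatible local server once this change lands. The import path, endpoint URL, model name, and placeholder token below are illustrative assumptions; only the `api_base`, `model`, and new `is_chat_model` keyword arguments are implied by the diff above.

    # Sketch only: parameters below are assumptions, not documented API.
    from pandasai_openai import OpenAI  # assumed import path for this extension package

    llm = OpenAI(
        api_token="not-needed",                 # local servers typically ignore the key
        api_base="http://localhost:11434/v1",   # assumed local OpenAI-compatible endpoint
        model="llama3.1",                       # must be listed by GET {api_base}/models
        is_chat_model=True,                     # kwarg introduced by this patch; defaults to True
    )

With this change, any model name returned by the server's /models endpoint is accepted instead of raising UnsupportedModelError.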