From 237dd574eb7e94710df95858183957839496e745 Mon Sep 17 00:00:00 2001
From: Shahriar Yazdipour
Date: Tue, 17 Jun 2025 17:00:19 +0200
Subject: [PATCH 1/4] feat: Add Azure OpenAI support and configuration instructions

---
 README.md                          | 17 ++++++++-
 src/chatdbg/assistant/assistant.py | 60 ++++++++++++++++++++++++------
 2 files changed, 65 insertions(+), 12 deletions(-)

diff --git a/README.md b/README.md
index 35c9de89..79f8dd09 100644
--- a/README.md
+++ b/README.md
@@ -26,13 +26,28 @@ For technical details and a complete evaluation, see our paper (to appear at FSE
 > [!IMPORTANT]
 >
-> ChatDBG currently needs to be connected to an [OpenAI account](https://openai.com/api/). _Your account will need to have a positive balance for this to work_ ([check your balance](https://platform.openai.com/account/usage)). If you have never purchased credits, you will need to purchase at least \$1 in credits (if your API account was created before August 13, 2023) or \$0.50 (if you have a newer API account) in order to have access to GPT-4, which ChatDBG uses. [Get a key here.](https://platform.openai.com/account/api-keys)
+> ChatDBG can be used with either OpenAI or Azure OpenAI.
+>
+> ### OpenAI Configuration
+> ChatDBG needs to be connected to an [OpenAI account](https://openai.com/api/). _Your account will need to have a positive balance for this to work_ ([check your balance](https://platform.openai.com/account/usage)). If you have never purchased credits, you will need to purchase at least \$1 in credits (if your API account was created before August 13, 2023) or \$0.50 (if you have a newer API account) in order to have access to GPT-4, which ChatDBG uses. [Get a key here.](https://platform.openai.com/account/api-keys)
 >
 > Once you have an API key, set it as an environment variable called `OPENAI_API_KEY`.
 >
 > ```bash
 > export OPENAI_API_KEY=
 > ```
+>
+> ### Azure OpenAI Configuration
+> Alternatively, you can use Azure OpenAI by setting the following environment variables:
+>
+> ```bash
+> export AZURE_API_KEY=
+> export AZURE_API_BASE= # e.g., https://YOUR_RESOURCE.openai.azure.com
+> export AZURE_API_VERSION= # e.g., 2024-02-15-preview
+> ```
+>
+> When using Azure OpenAI, you should use your deployment name as the model name. The deployment should be using GPT-4 or a compatible model that supports function calling.
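
(Not part of the patch itself, but to make the note above concrete: with the three `AZURE_*` variables exported as shown, litellm resolves model names of the form `azure/<deployment-name>` against that endpoint. A minimal sketch, assuming a placeholder deployment name and prompt:)

```python
# Illustrative sketch only, not code from this PR. Assumes AZURE_API_KEY,
# AZURE_API_BASE, and AZURE_API_VERSION are exported as described above.
import litellm

response = litellm.completion(
    # "my-gpt-4-deployment" is a placeholder for your own Azure deployment name,
    # addressed through litellm's "azure/" provider prefix.
    model="azure/my-gpt-4-deployment",
    messages=[{"role": "user", "content": "Say hello from ChatDBG."}],
)
print(response.choices[0].message.content)
```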
+>
 
 Install ChatDBG using `pip` (you need to do this whether you are debugging Python, C, or C++ code):
 
diff --git a/src/chatdbg/assistant/assistant.py b/src/chatdbg/assistant/assistant.py
index ea104548..ce6dee8d 100644
--- a/src/chatdbg/assistant/assistant.py
+++ b/src/chatdbg/assistant/assistant.py
@@ -1,4 +1,5 @@
 import json
+import os
 import string
 import textwrap
 import time
@@ -42,6 +43,18 @@ def __init__(
         # Hide their debugging info -- it messes with our error handling
         litellm.suppress_debug_info = True
 
+        # Configure Azure OpenAI if environment variables are present
+        if all(k in os.environ for k in ["AZURE_API_KEY", "AZURE_API_BASE", "AZURE_API_VERSION"]):
+            # For Azure OpenAI, model should be the deployment name
+            if not model.startswith("azure_") and not model.startswith("azure/"):
+                # Keep track that we're using Azure
+                self._using_azure = True
+                # Store original model name for error messages
+                self._base_model = model
+            else:
+                self._using_azure = False
+                self._base_model = model
+
         self._clients = listeners
 
         self._functions = {}
@@ -124,7 +137,23 @@ def _check_model(self):
         missing_keys = result["missing_keys"]
         if missing_keys != []:
             _, provider, _, _ = litellm.get_llm_provider(self._model)
-            if provider == "openai":
+            if provider == "azure":
+                missing_azure_vars = [k for k in missing_keys if k.startswith("AZURE_")]
+                raise AssistantError(
+                    textwrap.dedent(
+                        f"""\
+                        You need to set the following Azure OpenAI environment variables:
+                        - AZURE_API_KEY: Your Azure OpenAI API key
+                        - AZURE_API_BASE: Your Azure OpenAI endpoint (e.g., https://YOUR_RESOURCE.openai.azure.com)
+                        - AZURE_API_VERSION: API version (e.g., 2024-02-15-preview)
+
+                        Missing variables: {', '.join(missing_azure_vars)}
+
+                        For Azure OpenAI, use the deployment name as the model name.
+                        The deployment should be using {self._base_model} or compatible model."""
+                    )
+                )
+            elif provider == "openai":
                 raise AssistantError(
                     textwrap.dedent(
                         f"""\
@@ -148,7 +177,7 @@ def _check_model(self):
                     textwrap.dedent(
                         f"""\
                         The {self._model} model does not support function calls.
-                        You must use a model that does, eg. gpt-4."""
+                        You must use a deployment with a model that supports function calling, like GPT-4 or GPT-3.5-Turbo."""
                     )
                 )
         except:
@@ -156,7 +185,7 @@ def _check_model(self):
                 textwrap.dedent(
                     f"""\
                     {self._model} does not appear to be a supported model.
-                    See https://docs.litellm.ai/docs/providers."""
+                    See https://docs.litellm.ai/docs/providers/azure for Azure OpenAI configuration."""
                 )
             )
@@ -265,19 +294,28 @@ def _streamed_query(self, prompt: str, user_text):
         return stats
 
     def _stream_completion(self):
-        self._trim_conversation()
-        return litellm.completion(
-            model=self._model,
-            messages=self._conversation,
-            tools=[
+        completion_params = {
+            "model": self._model,
+            "messages": self._conversation,
+            "tools": [
                 {"type": "function", "function": f["schema"]}
                 for f in self._functions.values()
             ],
-            timeout=self._timeout,
-            stream=True,
-        )
+            "timeout": self._timeout,
+            "stream": True,
+        }
+
+        # Add Azure specific configuration if using Azure
+        if getattr(self, "_using_azure", False):
+            completion_params.update({
+                "api_key": os.getenv("AZURE_API_KEY"),
+                "api_base": os.getenv("AZURE_API_BASE"),
+                "api_version": os.getenv("AZURE_API_VERSION"),
+            })
+
+        return litellm.completion(**completion_params)
 
     def _trim_conversation(self):
         old_len = litellm.token_counter(self._model, messages=self._conversation)

From 25dadf3dd5ec8dfc952ca16ba9d99ba2645ccb37 Mon Sep 17 00:00:00 2001
From: Shahriar Yazdipour
Date: Fri, 20 Jun 2025 16:49:36 +0200
Subject: [PATCH 2/4] feat: Refactor Azure OpenAI configuration handling

---
 README.md                          |  3 +--
 src/chatdbg/assistant/assistant.py | 28 +++++++++-------------------
 2 files changed, 10 insertions(+), 21 deletions(-)

diff --git a/README.md b/README.md
index 79f8dd09..3405adfe 100644
--- a/README.md
+++ b/README.md
@@ -44,10 +44,9 @@ For technical details and a complete evaluation, see our paper (to appear at FSE
 > export AZURE_API_KEY=
 > export AZURE_API_BASE= # e.g., https://YOUR_RESOURCE.openai.azure.com
 > export AZURE_API_VERSION= # e.g., 2024-02-15-preview
+> export CHATDBG_MODEL= # e.g., azure/gpt-4o
 > ```
 >
-> When using Azure OpenAI, you should use your deployment name as the model name. The deployment should be using GPT-4 or a compatible model that supports function calling.
->
 
 Install ChatDBG using `pip` (you need to do this whether you are debugging Python, C, or C++ code):
 
diff --git a/src/chatdbg/assistant/assistant.py b/src/chatdbg/assistant/assistant.py
index ce6dee8d..e20b0e31 100644
--- a/src/chatdbg/assistant/assistant.py
+++ b/src/chatdbg/assistant/assistant.py
@@ -42,19 +42,6 @@ def __init__(
         # Hide their debugging info -- it messes with our error handling
         litellm.suppress_debug_info = True
-
-        # Configure Azure OpenAI if environment variables are present
-        if all(k in os.environ for k in ["AZURE_API_KEY", "AZURE_API_BASE", "AZURE_API_VERSION"]):
-            # For Azure OpenAI, model should be the deployment name
-            if not model.startswith("azure_") and not model.startswith("azure/"):
-                # Keep track that we're using Azure
-                self._using_azure = True
-                # Store original model name for error messages
-                self._base_model = model
-            else:
-                self._using_azure = False
-                self._base_model = model
-
         self._clients = listeners
 
         self._functions = {}
@@ -150,7 +137,7 @@ def _check_model(self):
                         Missing variables: {', '.join(missing_azure_vars)}
 
                         For Azure OpenAI, use the deployment name as the model name.
-                        The deployment should be using {self._base_model} or compatible model."""
+                        The deployment should be using a compatible model."""
                     )
                 )
             elif provider == "openai":
@@ -185,7 +172,7 @@ def _check_model(self):
                 textwrap.dedent(
                     f"""\
                     {self._model} does not appear to be a supported model.
-                    See https://docs.litellm.ai/docs/providers/azure for Azure OpenAI configuration."""
+                    See https://docs.litellm.ai/docs/providers/."""
                 )
             )
@@ -307,13 +294,16 @@ def _stream_completion(self):
             "stream": True,
         }
 
-        # Add Azure specific configuration if using Azure
-        if getattr(self, "_using_azure", False):
-            completion_params.update({
+        # Add Azure specific configuration if Azure env vars are present and model looks like an Azure deployment
+        if all(
+            k in os.environ
+            for k in ["AZURE_API_KEY", "AZURE_API_BASE", "AZURE_API_VERSION"]
+        ) and self._model.startswith("azure"):
+            completion_params |= {
                 "api_key": os.getenv("AZURE_API_KEY"),
                 "api_base": os.getenv("AZURE_API_BASE"),
                 "api_version": os.getenv("AZURE_API_VERSION"),
-            })
+            }
 
         return litellm.completion(**completion_params)

From 2ff0d72904e253dc4a13fcc20682ab6fdc4a8b10 Mon Sep 17 00:00:00 2001
From: Shahriar Yazdipour
Date: Fri, 20 Jun 2025 16:57:10 +0200
Subject: [PATCH 3/4] fix: revert minor style changes

---
 src/chatdbg/assistant/assistant.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/src/chatdbg/assistant/assistant.py b/src/chatdbg/assistant/assistant.py
index e20b0e31..e350b57b 100644
--- a/src/chatdbg/assistant/assistant.py
+++ b/src/chatdbg/assistant/assistant.py
@@ -42,6 +42,7 @@ def __init__(
         # Hide their debugging info -- it messes with our error handling
         litellm.suppress_debug_info = True
+
         self._clients = listeners
 
         self._functions = {}
@@ -164,7 +165,7 @@ def _check_model(self):
                     textwrap.dedent(
                         f"""\
                         The {self._model} model does not support function calls.
-                        You must use a deployment with a model that supports function calling, like GPT-4 or GPT-3.5-Turbo."""
+                        You must use a model that does, eg. gpt-4."""
                     )
                 )
         except:
@@ -172,7 +173,7 @@ def _check_model(self):
                 textwrap.dedent(
                     f"""\
                     {self._model} does not appear to be a supported model.
-                    See https://docs.litellm.ai/docs/providers/azure for Azure OpenAI configuration."""
+                    See https://docs.litellm.ai/docs/providers."""
                 )
             )

From 2011497db875f4d7006be635e972e59942ea4961 Mon Sep 17 00:00:00 2001
From: Shahriar Yazdipour
Date: Fri, 20 Jun 2025 17:17:52 +0200
Subject: [PATCH 4/4] feat: Add CHATDBG_MODEL environment variable requirement for Azure OpenAI configuration

---
 src/chatdbg/assistant/assistant.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/chatdbg/assistant/assistant.py b/src/chatdbg/assistant/assistant.py
index e350b57b..82393f57 100644
--- a/src/chatdbg/assistant/assistant.py
+++ b/src/chatdbg/assistant/assistant.py
@@ -134,6 +134,7 @@ def _check_model(self):
                         - AZURE_API_KEY: Your Azure OpenAI API key
                         - AZURE_API_BASE: Your Azure OpenAI endpoint (e.g., https://YOUR_RESOURCE.openai.azure.com)
                         - AZURE_API_VERSION: API version (e.g., 2024-02-15-preview)
+                        - CHATDBG_MODEL: The model name, which should be in the format azure/
 
                         Missing variables: {', '.join(missing_azure_vars)}
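
Taken together, the series settles on a simple contract: export the three `AZURE_*` variables plus `CHATDBG_MODEL=azure/<deployment>`, and `_stream_completion` forwards the credentials to litellm explicitly whenever the model name targets Azure. Below is a minimal standalone sketch of that final call path, not code from the PR; the fallback deployment name, prompt, and timeout are placeholder values.

```python
# Illustrative sketch of the final call path set up by this series; the fallback
# deployment name, prompt, and timeout below are placeholders, not PR values.
import os

import litellm

model = os.environ.get("CHATDBG_MODEL", "azure/my-gpt-4-deployment")

completion_params = {
    "model": model,  # "azure/<deployment>" selects litellm's Azure OpenAI provider
    "messages": [{"role": "user", "content": "Why did my program crash?"}],
    "timeout": 30,
    "stream": False,  # ChatDBG streams; a plain response keeps the sketch short
}

# Mirrors the guard in _stream_completion: forward Azure credentials only when
# all three variables are set and the model name targets an Azure deployment.
if all(
    k in os.environ
    for k in ["AZURE_API_KEY", "AZURE_API_BASE", "AZURE_API_VERSION"]
) and model.startswith("azure"):
    completion_params |= {
        "api_key": os.getenv("AZURE_API_KEY"),
        "api_base": os.getenv("AZURE_API_BASE"),
        "api_version": os.getenv("AZURE_API_VERSION"),
    }

response = litellm.completion(**completion_params)
print(response.choices[0].message.content)
```

With `stream=True`, as ChatDBG actually uses it, `litellm.completion` returns an iterator of chunks rather than a single response, which is why `_stream_completion` hands the raw call back to its caller instead of reading `choices` directly.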