Support loading a dict from a JSON file; remove the `post_api` model type since it is an abstract class that cannot be used directly.
DavdGao committed Jan 4, 2025
1 parent 38a411e commit 7f9cd35
Showing 4 changed files with 16 additions and 70 deletions.
4 changes: 2 additions & 2 deletions docs/sphinx_doc/en/source/tutorial/203-model.md
@@ -90,7 +90,7 @@ In the current AgentScope, the supported `model_type` types, the corresponding
 | | Generation | [`OllamaGenerationWrapper`](https://github.com/modelscope/agentscope/blob/main/src/agentscope/models/ollama_model.py) | `"ollama_generate"` | llama2, ... |
 | LiteLLM API | Chat | [`LiteLLMChatWrapper`](https://github.com/modelscope/agentscope/blob/main/src/agentscope/models/litellm_model.py) | `"litellm_chat"` | - |
 | Yi API | Chat | [`YiChatWrapper`](https://github.com/modelscope/agentscope/blob/main/src/agentscope/models/yi_model.py) | `"yi_chat"` | yi-large, yi-medium, ... |
-| Post Request based API | - | [`PostAPIModelWrapperBase`](https://github.com/modelscope/agentscope/blob/main/src/agentscope/models/post_model.py) | `"post_api"` | - |
+| Post Request based API | - | [`PostAPIModelWrapperBase`](https://github.com/modelscope/agentscope/blob/main/src/agentscope/models/post_model.py) | - | - |
 | | Chat | [`PostAPIChatWrapper`](https://github.com/modelscope/agentscope/blob/main/src/agentscope/models/post_model.py) | `"post_api_chat"` | meta-llama/Meta-Llama-3-8B-Instruct, ... |
 | | Image Synthesis | [`PostAPIDALLEWrapper`](https://github.com/modelscope/agentscope/blob/main/src/agentscope/models/post_model.py) | `post_api_dall_e` | - | |
 | | Embedding | [`PostAPIEmbeddingWrapper`](https://github.com/modelscope/agentscope/blob/main/src/agentscope/models/post_model.py) | `post_api_embedding` | - |
@@ -519,7 +519,7 @@
 ```python
 {
     "config_name": "my_postapiwrapper_config",
-    "model_type": "post_api",
+    "model_type": "post_api_chat",
 
     # Required parameters
     "api_url": "https://xxx.xxx",
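With `"post_api"` no longer registered as a usable `model_type`, the tutorial's Post API example must name a concrete wrapper such as `"post_api_chat"`. Below is a minimal sketch of a complete config of that kind: the required fields come from the snippet above, the optional field names from the Post API example in the docstring removed from `model.py` further down, and the concrete values (token, lengths, timeouts) are placeholders rather than values taken from this commit.

```python
# Illustrative sketch only: a Post API chat config after this change.
# "post_api" maps to the abstract PostAPIModelWrapperBase and can no longer
# be used directly; "post_api_chat" selects PostAPIChatWrapper instead.
post_api_chat_config = {
    "config_name": "my_postapiwrapper_config",
    "model_type": "post_api_chat",  # was "post_api" before this commit

    # Required parameters
    "api_url": "https://xxx.xxx",  # placeholder endpoint from the tutorial
    "headers": {"Authorization": "Bearer {API_TOKEN}"},  # placeholder token

    # Optional parameters, named after the removed model.py docstring example;
    # the numbers below are illustrative, not values from this commit.
    "max_length": 4096,
    "timeout": 30,
    "max_retries": 3,
    "generate_args": {
        "temperature": 0.5,
    },
}
```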
4 changes: 2 additions & 2 deletions docs/sphinx_doc/zh_CN/source/tutorial/203-model.md
@@ -110,7 +110,7 @@ The supported APIs are as follows:
 | | Generation | [`OllamaGenerationWrapper`](https://github.com/modelscope/agentscope/blob/main/src/agentscope/models/ollama_model.py) | `"ollama_generate"` | llama2, ... |
 | LiteLLM API | Chat | [`LiteLLMChatWrapper`](https://github.com/modelscope/agentscope/blob/main/src/agentscope/models/litellm_model.py) | `"litellm_chat"` | - |
 | Yi API | Chat | [`YiChatWrapper`](https://github.com/modelscope/agentscope/blob/main/src/agentscope/models/yi_model.py) | `"yi_chat"` | yi-large, yi-medium, ... |
-| Post Request based API | - | [`PostAPIModelWrapperBase`](https://github.com/modelscope/agentscope/blob/main/src/agentscope/models/post_model.py) | `"post_api"` | - |
+| Post Request based API | - | [`PostAPIModelWrapperBase`](https://github.com/modelscope/agentscope/blob/main/src/agentscope/models/post_model.py) | - | - |
 | | Chat | [`PostAPIChatWrapper`](https://github.com/modelscope/agentscope/blob/main/src/agentscope/models/post_model.py) | `"post_api_chat"` | meta-llama/Meta-Llama-3-8B-Instruct, ... |
 | | Image Synthesis | [`PostAPIDALLEWrapper`](https://github.com/modelscope/agentscope/blob/main/src/agentscope/models/post_model.py) | `post_api_dall_e` | - | |
 | | Embedding | [`PostAPIEmbeddingWrapper`](https://github.com/modelscope/agentscope/blob/main/src/agentscope/models/post_model.py) | `post_api_embedding` | - |
@@ -540,7 +540,7 @@
 ```python
 {
     "config_name": "my_postapiwrapper_config",
-    "model_type": "post_api",
+    "model_type": "post_api_chat",
 
     # Required parameters
     "api_url": "https://xxx.xxx",
24 changes: 11 additions & 13 deletions src/agentscope/manager/_model.py
@@ -89,30 +89,28 @@ def load_model_configs(
         if clear_existing:
             self.clear_model_configs()
 
-        cfgs = None
+        cfgs = model_configs
 
         # Load model configs from a path
-        if isinstance(model_configs, str):
-            if not os.path.exists(model_configs):
+        if isinstance(cfgs, str):
+            if not os.path.exists(cfgs):
                 raise FileNotFoundError(
                     f"Cannot find the model configs file in the given path "
                     f"`{model_configs}`.",
                 )
-            with open(model_configs, "r", encoding="utf-8") as f:
+            with open(cfgs, "r", encoding="utf-8") as f:
                 cfgs = json.load(f)
 
         # Load model configs from a dict or a list of dicts
-        if isinstance(model_configs, dict):
-            cfgs = [model_configs]
+        if isinstance(cfgs, dict):
+            cfgs = [cfgs]
 
-        if isinstance(model_configs, list):
-            if not all(isinstance(_, dict) for _ in model_configs):
+        if isinstance(cfgs, list):
+            if not all(isinstance(_, dict) for _ in cfgs):
                 raise ValueError(
                     "The model config unit should be a dict.",
                 )
-            cfgs = model_configs
-
-        if cfgs is None:
+        else:
             raise TypeError(
                 f"Invalid type of model_configs, it could be a dict, a list "
                 f"of dicts, or a path to a json file (containing a dict or a "
@@ -163,15 +161,15 @@ def get_model_by_config_name(self, config_name: str) -> ModelWrapperBase:
             )
 
         model_type = config["model_type"]
-        kwargs = {k: v for k, v in config.items() if k != "model_type"}
-
         if model_type not in self.model_wrapper_mapping:
             raise ValueError(
                 f"Unsupported model_type `{model_type}`, currently supported "
                 f"model types: "
                 f"{', '.join(list(self.model_wrapper_mapping.keys()))}. ",
             )
 
+        kwargs = {k: v for k, v in config.items() if k != "model_type"}
+
         return self.model_wrapper_mapping[model_type](**kwargs)
 
     def get_config_by_name(self, config_name: str) -> Union[dict, None]:
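Taken together, the two hunks above change how configs are ingested and when the wrapper keyword arguments are built. The sketch below shows the behaviour the new code implies: a JSON file path, a single dict, or a list of dicts are all accepted, anything else raises `TypeError`, and `model_type` is validated before the wrapper is instantiated. It assumes the manager singleton is reachable as `ModelManager.get_instance()` from `agentscope.manager`; that access path is not shown in this diff.

```python
# Usage sketch under the assumption stated above (ModelManager.get_instance()
# is not part of this diff).
import os

from agentscope.manager import ModelManager

manager = ModelManager.get_instance()

# 1) A path to a JSON file containing a dict or a list of dicts: the file is
#    read with json.load() and the result flows through the same `cfgs` checks.
if os.path.exists("./model_configs.json"):
    manager.load_model_configs("./model_configs.json")

# 2) A single dict is wrapped into a one-element list (`cfgs = [cfgs]`).
manager.load_model_configs(
    {
        "config_name": "my_postapiwrapper_config",
        "model_type": "post_api_chat",
        "api_url": "https://xxx.xxx",
    },
)

# 3) Any other type now falls into the new `else` branch and raises TypeError.
try:
    manager.load_model_configs(42)  # type: ignore[arg-type]
except TypeError as err:
    print(err)

# Wrappers are still looked up by config name; kwargs are assembled only after
# model_type has been validated against model_wrapper_mapping.
model = manager.get_model_by_config_name("my_postapiwrapper_config")
```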
54 changes: 1 addition & 53 deletions src/agentscope/models/model.py
@@ -1,58 +1,6 @@
 # -*- coding: utf-8 -*-
-"""The configuration file should contain one or a list of model configs,
-and each model config should follow the following format.
+"""The model wrapper base class."""
 
-.. code-block:: python
-
-    {
-        "config_name": "{config_name}",
-        "model_type": "openai_chat" | "post_api" | ...,
-        ...
-    }
-
-After that, you can specify model by {config_name}.
-
-Note:
-    The parameters for different types of models are different. For OpenAI API,
-    the format is:
-
-    .. code-block:: python
-
-        {
-            "config_name": "{id of your model}",
-            "model_type": "openai_chat",
-            "model_name": "{model_name_for_openai, e.g. gpt-3.5-turbo}",
-            "api_key": "{your_api_key}",
-            "organization": "{your_organization, if needed}",
-            "client_args": {
-                # ...
-            },
-            "generate_args": {
-                # ...
-            }
-        }
-
-
-    For Post API, toking huggingface inference API as an example, its format
-    is:
-
-    .. code-block:: python
-
-        {
-            "config_name": "{config_name}",
-            "model_type": "post_api",
-            "api_url": "{api_url}",
-            "headers": {"Authorization": "Bearer {API_TOKEN}"},
-            "max_length": {max_length_of_model},
-            "timeout": {timeout},
-            "max_retries": {max_retries},
-            "generate_args": {
-                "temperature": 0.5,
-                # ...
-            }
-        }
-
-"""
 from __future__ import annotations
 import inspect
 import time
