16 changes: 13 additions & 3 deletions lightllm/server/api_models.py
@@ -1,7 +1,7 @@
 import time
 import uuid
 
-from pydantic import BaseModel, Field, field_validator, model_validator
+from pydantic import BaseModel, ConfigDict, Field, field_validator, model_validator
 from typing import Any, Dict, List, Optional, Union, Literal, ClassVar
 from transformers import GenerationConfig
 
@@ -21,6 +21,14 @@ class Message(BaseModel):
     content: Union[str, List[MessageContent]]
 
 
+class CharacterMessage(BaseModel):
+    """Message format for character-based chat, where role is inferred from name."""
+
+    name: str
+    content: Union[str, List[MessageContent]]
+    role: Optional[str] = None  # Optional, can be inferred from role_setting
+
+
 class Function(BaseModel):
     """Function descriptions."""
 
@@ -105,7 +113,7 @@ def _normalize_role(cls, v):
         raise ValueError("'role' must be a string")
 
 
-ChatCompletionMessageParam = Union[ChatCompletionMessageGenericParam, Message]
+ChatCompletionMessageParam = Union[ChatCompletionMessageGenericParam, Message, CharacterMessage]
 
 
 class CompletionRequest(BaseModel):
@@ -176,6 +184,8 @@ def apply_loaded_defaults(cls, data: Any):
 
 
 class ChatCompletionRequest(BaseModel):
+    model_config = ConfigDict(populate_by_name=True)
+
     model: str
     messages: List[ChatCompletionMessageParam]
     function_call: Optional[str] = "none"
@@ -216,7 +226,7 @@ class ChatCompletionRequest(BaseModel):
     top_k: Optional[int] = -1
     repetition_penalty: Optional[float] = 1.0
     ignore_eos: Optional[bool] = False
-    role_settings: Optional[Dict[str, str]] = None
+    role_settings: Optional[Dict[str, str]] = Field(default=None, alias="role_setting")
     character_settings: Optional[List[Dict[str, str]]] = None
 
     # Class variables to store loaded default values
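With populate_by_name=True, the request now accepts either the wire name role_setting (the alias) or the Python field name role_settings, and the widened union lets a message carry only a name with no explicit role. The following is a minimal sketch of how these pieces would parse, assuming model and messages are the only required request fields and that pydantic's union resolution falls back to CharacterMessage for role-less entries; the payload values are placeholders:

# Sketch, not part of the diff: exercising the new alias and CharacterMessage.
from lightllm.server.api_models import ChatCompletionRequest

payload = {
    "model": "demo-model",  # placeholder model name
    "messages": [
        {"role": "user", "content": "Hello"},        # plain Message
        {"name": "Alice", "content": "Hi there."},   # CharacterMessage, no role
    ],
    "role_setting": {"Alice": "a cheerful guide"},   # wire name (alias)
}

req = ChatCompletionRequest(**payload)
assert req.role_settings == {"Alice": "a cheerful guide"}

# populate_by_name=True also accepts the Python field name directly:
req2 = ChatCompletionRequest(
    model="demo-model",
    messages=payload["messages"],
    role_settings={"Alice": "a cheerful guide"},
)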
3 changes: 2 additions & 1 deletion lightllm/server/api_openai.py
@@ -105,7 +105,8 @@ def _get_history_tool_calls_cnt(request: ChatCompletionRequest) -> int:
     messages = getattr(request, "messages", [])
     idx = 0
     for msg in messages:
-        if msg.role == "assistant":
+        role = getattr(msg, "role", None)
+        if role == "assistant":
             tool_calls = getattr(msg, "tool_calls", None)
             idx += len(list(tool_calls)) if tool_calls is not None else 0  # noqa
     return idx
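Since a CharacterMessage may carry role=None, and other message shapes might not define role at all, the loop now reads the attribute defensively instead of assuming msg.role exists. A small illustration of the counting behaviour with hypothetical stand-in objects follows (only the getattr pattern mirrors the function above; the stubs and values are invented):

# Hypothetical stubs exercising _get_history_tool_calls_cnt from the diff above.
from types import SimpleNamespace

from lightllm.server.api_openai import _get_history_tool_calls_cnt

fake_request = SimpleNamespace(messages=[
    SimpleNamespace(role="user", tool_calls=None),
    SimpleNamespace(role="assistant", tool_calls=[{"id": "call_1"}, {"id": "call_2"}]),
    SimpleNamespace(role=None, tool_calls=None),  # e.g. a CharacterMessage without a role
])

# The None-role message is skipped safely, so only the assistant message counts.
assert _get_history_tool_calls_cnt(fake_request) == 2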
6 changes: 5 additions & 1 deletion lightllm/server/build_prompt.py
@@ -17,7 +17,11 @@ async def build_prompt(request, tools) -> str:
     global tokenizer
     # Convert the pydantic objects to dicts; otherwise Jinja cannot recognize them when the template is assembled from tokenizer_config.json
     messages = [m.model_dump(by_alias=True, exclude_none=True) for m in request.messages]
-    kwargs = {"conversation": messages}
+    kwargs = {
+        "conversation": messages,
+        # Assumes the request object has this field, or that you want to pass an empty value
+        "system_instruction": getattr(request, "system_instruction", ""),
Review comment (Contributor, severity: medium):

The code attempts to access request.system_instruction, but this field is not defined in the ChatCompletionRequest model in api_models.py. This will always result in an empty string "" being used due to getattr, making this new parameter ineffective.

To properly implement this feature, you should add system_instruction as an optional field to the ChatCompletionRequest model in lightllm/server/api_models.py.

For example:

# In lightllm/server/api_models.py
class ChatCompletionRequest(BaseModel):
    # ...
    messages: List[ChatCompletionMessageParam]
    system_instruction: Optional[str] = None
    # ...

Additionally, the inline comment (originally in Chinese: "assumes the request object has this field, or that you want to pass an empty value") is informal. It would be better to remove it once the feature is fully implemented, or to replace it with a formal comment explaining the purpose of system_instruction.

+    }
     if request.character_settings:
         kwargs["character_settings"] = request.character_settings
     if request.role_settings:
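As the review comment points out, system_instruction only takes effect once the field actually exists on ChatCompletionRequest. Further down (outside this diff), build_prompt presumably forwards these kwargs to the tokenizer's chat template; the sketch below assumes a tokenizer.apply_chat_template call and a Jinja template in tokenizer_config.json that reads the extra variables, with "demo/model" as a placeholder checkpoint:

# Sketch under the assumptions above; "demo/model" is a placeholder checkpoint.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("demo/model")
kwargs = {
    "conversation": [{"role": "user", "content": "Hello"}],
    "system_instruction": "You are a helpful narrator.",  # requires the field on ChatCompletionRequest
}
prompt = tokenizer.apply_chat_template(
    tokenize=False,
    add_generation_prompt=True,
    **kwargs,  # extra keys become variables visible to the Jinja chat template
)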