Skip to content
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "zai-sdk"
version = "0.0.3.6"
version = "0.0.4"
description = "An SDK library for accessing big model APIs from Z.ai"
authors = ["Z.ai"]
readme = "README.md"
Expand Down
2 changes: 1 addition & 1 deletion src/zai/_version.py
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
__title__ = 'Z.ai'
__version__ = '0.0.3.6'
__version__ = '0.0.4'
19 changes: 0 additions & 19 deletions src/zai/api_resource/chat/async_completions.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,25 +88,6 @@ def create(
watermark_enabled (Optional[bool]): Whether to enable watermark on generated audio
"""
_cast_type = AsyncTaskStatus
logger.debug(f'temperature:{temperature}, top_p:{top_p}')
if temperature is not None and temperature != NOT_GIVEN:
if temperature <= 0:
do_sample = False
temperature = 0.01
# logger.warning("temperature: value range is (0.0, 1.0) open interval,"
# "do_sample rewritten as false (parameters top_p temperature do not take effect)")
if temperature >= 1:
temperature = 0.99
# logger.warning("temperature: value range is (0.0, 1.0) open interval")
if top_p is not None and top_p != NOT_GIVEN:
if top_p >= 1:
top_p = 0.99
# logger.warning("top_p: value range is (0.0, 1.0) open interval, cannot equal 0 or 1")
if top_p <= 0:
top_p = 0.01
# logger.warning("top_p: value range is (0.0, 1.0) open interval, cannot equal 0 or 1")

logger.debug(f'temperature:{temperature}, top_p:{top_p}')
if isinstance(messages, List):
for item in messages:
if item.get('content'):
Expand Down
3 changes: 3 additions & 0 deletions src/zai/api_resource/chat/completions.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,7 @@ def create(
response_format: object | None = None,
thinking: object | None = None,
watermark_enabled: Optional[bool] | NotGiven = NOT_GIVEN,
tool_stream: bool | NotGiven = NOT_GIVEN,
) -> Completion | StreamResponse[ChatCompletionChunk]:
"""
Create a chat completion
Expand Down Expand Up @@ -93,6 +94,7 @@ def create(
response_format (object): Response format specification
thinking (Optional[object]): Configuration parameters for model reasoning
watermark_enabled (Optional[bool]): Whether to enable watermark on generated audio
tool_stream (Optional[bool]): Whether to enable tool streaming
"""
logger.debug(f'temperature:{temperature}, top_p:{top_p}')
if temperature is not None and temperature != NOT_GIVEN:
Expand Down Expand Up @@ -141,6 +143,7 @@ def create(
'response_format': response_format,
'thinking': thinking,
'watermark_enabled': watermark_enabled,
'tool_stream': tool_stream,
}
)
return self._post(
Expand Down
Loading