Skip to content

Chore/type checking types 01 #105

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 13 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .github/workflows/build.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ on:
branches:
- feature/*
- feat/*
- chore/*
- bugfix/*
- hotfix/*
- fix/*
Expand Down
6 changes: 4 additions & 2 deletions dapr_agents/types/agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,13 +70,15 @@ class AgentActorState(BaseModel):
"""Represents the state of an agent, tracking message history, task history, and overall status."""

messages: Optional[List[AgentActorMessage]] = Field(
default_factory=list, description="History of messages exchanged by the agent"
default_factory=list[AgentActorMessage],
description="History of messages exchanged by the agent",
)
message_count: int = Field(
0, description="Total number of messages exchanged by the agent"
)
task_history: Optional[List[AgentTaskEntry]] = Field(
default_factory=list, description="History of tasks the agent has performed"
default_factory=list[AgentTaskEntry],
description="History of tasks the agent has performed",
)
overall_status: AgentStatus = Field(
AgentStatus.IDLE, description="Current operational status of the agent"
Expand Down
39 changes: 31 additions & 8 deletions dapr_agents/types/llm.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from typing import List, Union, Optional, Dict, Any, Literal, IO, Tuple
from typing import List, Union, Optional, Dict, Any, Literal, IO, Tuple, cast
from pydantic import BaseModel, Field, model_validator, field_validator, ConfigDict
from pydantic_core import PydanticUseDefault
from pathlib import Path
Expand Down Expand Up @@ -143,7 +143,7 @@ class OpenAIModelConfig(OpenAIClientConfig):
type: Literal["openai"] = Field(
"openai", description="Type of the model, must always be 'openai'"
)
name: str = Field(default=None, description="Name of the OpenAI model")
name: str = Field(default="", description="Name of the OpenAI model")


class AzureOpenAIModelConfig(AzureOpenAIClientConfig):
Expand All @@ -157,7 +157,7 @@ class HFHubModelConfig(HFInferenceClientConfig):
"huggingface", description="Type of the model, must always be 'huggingface'"
)
name: str = Field(
default=None, description="Name of the model available through Hugging Face"
default="", description="Name of the model available through Hugging Face"
)


Expand All @@ -166,7 +166,7 @@ class NVIDIAModelConfig(NVIDIAClientConfig):
"nvidia", description="Type of the model, must always be 'nvidia'"
)
name: str = Field(
default=None, description="Name of the model available through NVIDIA"
default="", description="Name of the model available through NVIDIA"
)


Expand Down Expand Up @@ -412,6 +412,14 @@ def sync_model_name(cls, values: dict):
elif configuration.get("type") == "nvidia":
configuration = NVIDIAModelConfig(**configuration)

configuration = cast(
OpenAIModelConfig
| AzureOpenAIModelConfig
| HFHubModelConfig
| NVIDIAModelConfig,
configuration,
)

# Ensure 'parameters' is properly validated as a model, not a dict
if isinstance(parameters, dict):
if configuration and isinstance(configuration, OpenAIModelConfig):
Expand All @@ -423,12 +431,27 @@ def sync_model_name(cls, values: dict):
elif configuration and isinstance(configuration, NVIDIAModelConfig):
parameters = NVIDIAChatCompletionParams(**parameters)

parameters = cast(
OpenAIChatCompletionParams
| HFHubChatCompletionParams
| NVIDIAChatCompletionParams,
parameters,
)

if configuration and parameters:
# Check if 'name' or 'azure_deployment' is explicitly set
if "name" in configuration.model_fields_set:
parameters.model = configuration.name
parameters.model = (
configuration.name
if not isinstance(configuration, AzureOpenAIModelConfig)
else None
)
elif "azure_deployment" in configuration.model_fields_set:
parameters.model = configuration.azure_deployment
parameters.model = (
configuration.azure_deployment
if isinstance(configuration, AzureOpenAIModelConfig)
else None
)

values["configuration"] = configuration
values["parameters"] = parameters
Expand Down Expand Up @@ -550,7 +573,7 @@ def validate_file(
elif isinstance(value, BufferedReader) or (
hasattr(value, "read") and callable(value.read)
):
if value.closed:
if hasattr(value, "closed") and value.closed:
raise ValueError("File-like object must remain open during request.")
return value
elif isinstance(value, tuple):
Expand Down Expand Up @@ -614,7 +637,7 @@ def validate_file(
elif isinstance(value, BufferedReader) or (
hasattr(value, "read") and callable(value.read)
):
if value.closed: # Reopen if closed
if hasattr(value, "closed") and value.closed: # Reopen if closed
raise ValueError("File-like object must remain open during request.")
return value
elif isinstance(value, tuple):
Expand Down
3 changes: 2 additions & 1 deletion dapr_agents/types/message.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
from typing import Any
from pydantic import (
BaseModel,
field_validator,
Expand Down Expand Up @@ -183,7 +184,7 @@ class ChatCompletion(BaseModel):
object: Optional[str] = None
usage: dict

def get_message(self) -> Optional[str]:
def get_message(self) -> Optional[Dict[str, Any]]:
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I do not think this is correct, because the content being retrieved is just a plain string, right?

def get_content(self) -> Optional[str]:
        """
        Retrieve the content from the first choice's message.
        """
        message = self.get_message()
        return message.get("content") if message else None

A Response Example -> Choices -> 0 -> Message -> Content right?

{
  "id": "chatcmpl-B9MBs8CjcvOU2jLn4n570S5qMJKcT",
  "object": "chat.completion",
  "created": 1741569952,
  "model": "gpt-4.1-2025-04-14",
  "choices": [
    {
      "index": 0,
      "message": {
        "role": "assistant",
        "content": "Hello! How can I assist you today?",
        "refusal": null,
        "annotations": []
      },
      "logprobs": null,
      "finish_reason": "stop"
    }
  ],
  "usage": {
    "prompt_tokens": 19,
    "completion_tokens": 10,
    "total_tokens": 29,
    "prompt_tokens_details": {
      "cached_tokens": 0,
      "audio_tokens": 0
    },
    "completion_tokens_details": {
      "reasoning_tokens": 0,
      "audio_tokens": 0,
      "accepted_prediction_tokens": 0,
      "rejected_prediction_tokens": 0
    }
  },
  "service_tier": "default"
}

So having Optional[str] is the right thing here, and not Optional[Dict[str, Any]], right?

Copy link
Contributor Author

@CasperGN CasperGN Apr 27, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@Cyb3rWard0g yes, the content from get_content returns a plain string; however, get_message returns the result of .model_dump(), which returns a Dict[str, Any]:

    def model_dump(
        self,
        *,
        mode: Literal['json', 'python'] | str = 'python',
        include: IncEx | None = None,
        exclude: IncEx | None = None,
        context: Any | None = None,
        by_alias: bool | None = None,
        exclude_unset: bool = False,
        exclude_defaults: bool = False,
        exclude_none: bool = False,
        round_trip: bool = False,
        warnings: bool | Literal['none', 'warn', 'error'] = True,
        fallback: Callable[[Any], Any] | None = None,
        serialize_as_any: bool = False,
    ) -> dict[str, Any]:
...

This then also ensures that when get_content is fetched (which calls get_message) the .get("content") works since it's a dict (well, unless a None was returned).

"""
Retrieve the main message content from the first choice.
"""
Expand Down
3 changes: 0 additions & 3 deletions mypy.ini
Original file line number Diff line number Diff line change
Expand Up @@ -40,8 +40,5 @@ ignore_errors = True
[mypy-dapr_agents.tool.*]
ignore_errors = True

[mypy-dapr_agents.types.*]
ignore_errors = True

[mypy-dapr_agents.workflow.*]
ignore_errors = True
3 changes: 2 additions & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -14,4 +14,5 @@ pyyaml==6.0.2
rich==13.9.4
huggingface_hub==0.30.2
numpy==2.2.2
mcp==1.6.0
mcp==1.6.0
dotenv==0.9.9