client-sdks/stainless/openapi.yml (72 changes: 72 additions & 0 deletions)
@@ -5574,11 +5574,44 @@ components:
oneOf:
- $ref: '#/components/schemas/OpenAIResponseInputMessageContentText'
- $ref: '#/components/schemas/OpenAIResponseInputMessageContentImage'
- $ref: '#/components/schemas/OpenAIResponseInputMessageContentFile'
discriminator:
propertyName: type
mapping:
input_text: '#/components/schemas/OpenAIResponseInputMessageContentText'
input_image: '#/components/schemas/OpenAIResponseInputMessageContentImage'
input_file: '#/components/schemas/OpenAIResponseInputMessageContentFile'
OpenAIResponseInputMessageContentFile:
type: object
properties:
type:
type: string
const: input_file
default: input_file
description: >-
The type of the input item. Always `input_file`.
file_data:
type: string
description: >-
The data of the file to be sent to the model.
file_id:
type: string
description: >-
(Optional) The ID of the file to be sent to the model.
file_url:
type: string
description: >-
The URL of the file to be sent to the model.
filename:
type: string
description: >-
The name of the file to be sent to the model.
additionalProperties: false
required:
- type
title: OpenAIResponseInputMessageContentFile
description: >-
File content for input messages in OpenAI response format.
OpenAIResponseInputMessageContentImage:
type: object
properties:
@@ -5599,6 +5632,10 @@
default: input_image
description: >-
Content type identifier, always "input_image"
file_id:
type: string
description: >-
(Optional) The ID of the file to be sent to the model.
image_url:
type: string
description: (Optional) URL of the image content
@@ -6998,6 +7035,10 @@ components:
type: string
description: >-
(Optional) ID of the previous response in a conversation
prompt:
$ref: '#/components/schemas/Prompt'
description: >-
(Optional) Prompt object with ID, version, and variables
status:
type: string
description: >-
@@ -7315,6 +7356,29 @@ components:
title: OpenAIResponseInputToolMCP
description: >-
Model Context Protocol (MCP) tool configuration for OpenAI response inputs.
OpenAIResponsePromptParam:
type: object
properties:
id:
type: string
description: Unique identifier of the prompt template
variables:
type: object
additionalProperties:
$ref: '#/components/schemas/OpenAIResponseInputMessageContent'
description: >-
Dictionary of variable names to OpenAIResponseInputMessageContent structure
for template substitution
version:
type: string
description: >-
Version number of the prompt to use (defaults to latest if not specified)
additionalProperties: false
required:
- id
title: OpenAIResponsePromptParam
description: >-
Prompt object that is used for OpenAI responses.
CreateOpenaiResponseRequest:
type: object
properties:
@@ -7328,6 +7392,10 @@
model:
type: string
description: The underlying LLM used for completions.
prompt:
$ref: '#/components/schemas/OpenAIResponsePromptParam'
description: >-
Prompt object with ID, version, and variables.
instructions:
type: string
previous_response_id:
@@ -7405,6 +7473,10 @@
type: string
description: >-
(Optional) ID of the previous response in a conversation
prompt:
$ref: '#/components/schemas/Prompt'
description: >-
(Optional) Prompt object with ID, version, and variables
status:
type: string
description: >-
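For orientation, a request body that exercises the new fields might look like the sketch below. It is illustrative only: the field names follow the CreateOpenaiResponseRequest, OpenAIResponsePromptParam, and OpenAIResponseInputMessageContentFile schemas added above, while the model name, identifiers, filename, and variable name are hypothetical placeholders.

# Illustrative request body (YAML form); all identifiers are hypothetical.
model: llama-3.3-70b-instruct        # placeholder model identifier
instructions: You are a concise analyst.
prompt:
  id: pmpt_example                   # hypothetical prompt ID
  version: "2"                       # optional; the latest version is used if omitted
  variables:
    attachment:                      # variable name assumed to exist in the prompt template
      type: input_file
      file_id: file_example123       # hypothetical file ID; file_data or file_url could be used instead
      filename: quarterly-report.pdf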
docs/static/deprecated-llama-stack-spec.html (119 changes: 118 additions & 1 deletion)
@@ -8593,16 +8593,53 @@
},
{
"$ref": "#/components/schemas/OpenAIResponseInputMessageContentImage"
},
{
"$ref": "#/components/schemas/OpenAIResponseInputMessageContentFile"
}
],
"discriminator": {
"propertyName": "type",
"mapping": {
"input_text": "#/components/schemas/OpenAIResponseInputMessageContentText",
"input_image": "#/components/schemas/OpenAIResponseInputMessageContentImage"
"input_image": "#/components/schemas/OpenAIResponseInputMessageContentImage",
"input_file": "#/components/schemas/OpenAIResponseInputMessageContentFile"
}
}
},
"OpenAIResponseInputMessageContentFile": {
"type": "object",
"properties": {
"type": {
"type": "string",
"const": "input_file",
"default": "input_file",
"description": "The type of the input item. Always `input_file`."
},
"file_data": {
"type": "string",
"description": "The data of the file to be sent to the model."
},
"file_id": {
"type": "string",
"description": "(Optional) The ID of the file to be sent to the model."
},
"file_url": {
"type": "string",
"description": "The URL of the file to be sent to the model."
},
"filename": {
"type": "string",
"description": "The name of the file to be sent to the model."
}
},
"additionalProperties": false,
"required": [
"type"
],
"title": "OpenAIResponseInputMessageContentFile",
"description": "File content for input messages in OpenAI response format."
},
"OpenAIResponseInputMessageContentImage": {
"type": "object",
"properties": {
@@ -8630,6 +8667,10 @@
"default": "input_image",
"description": "Content type identifier, always \"input_image\""
},
"file_id": {
"type": "string",
"description": "(Optional) The ID of the file to be sent to the model."
},
"image_url": {
"type": "string",
"description": "(Optional) URL of the image content"
@@ -8993,6 +9034,10 @@
"type": "string",
"description": "(Optional) ID of the previous response in a conversation"
},
"prompt": {
"$ref": "#/components/schemas/Prompt",
"description": "(Optional) Prompt object with ID, version, and variables"
},
"status": {
"type": "string",
"description": "Current status of the response generation"
@@ -9610,6 +9655,44 @@
"title": "OpenAIResponseUsage",
"description": "Usage information for OpenAI response."
},
"Prompt": {
"type": "object",
"properties": {
"prompt": {
"type": "string",
"description": "The system prompt text with variable placeholders. Variables are only supported when using the Responses API."
},
"version": {
"type": "integer",
"description": "Version (integer starting at 1, incremented on save)"
},
"prompt_id": {
"type": "string",
"description": "Unique identifier formatted as 'pmpt_<48-digit-hash>'"
},
"variables": {
"type": "array",
"items": {
"type": "string"
},
"description": "List of prompt variable names that can be used in the prompt template"
},
"is_default": {
"type": "boolean",
"default": false,
"description": "Boolean indicating whether this version is the default version for this prompt"
}
},
"additionalProperties": false,
"required": [
"version",
"prompt_id",
"variables",
"is_default"
],
"title": "Prompt",
"description": "A prompt resource representing a stored OpenAI Compatible prompt template in Llama Stack."
},
"ResponseGuardrailSpec": {
"type": "object",
"properties": {
@@ -9766,6 +9849,32 @@
"title": "OpenAIResponseInputToolMCP",
"description": "Model Context Protocol (MCP) tool configuration for OpenAI response inputs."
},
"OpenAIResponsePromptParam": {
"type": "object",
"properties": {
"id": {
"type": "string",
"description": "Unique identifier of the prompt template"
},
"variables": {
"type": "object",
"additionalProperties": {
"$ref": "#/components/schemas/OpenAIResponseInputMessageContent"
},
"description": "Dictionary of variable names to OpenAIResponseInputMessageContent structure for template substitution"
},
"version": {
"type": "string",
"description": "Version number of the prompt to use (defaults to latest if not specified)"
}
},
"additionalProperties": false,
"required": [
"id"
],
"title": "OpenAIResponsePromptParam",
"description": "Prompt object that is used for OpenAI responses."
},
"CreateOpenaiResponseRequest": {
"type": "object",
"properties": {
@@ -9787,6 +9896,10 @@
"type": "string",
"description": "The underlying LLM used for completions."
},
"prompt": {
"$ref": "#/components/schemas/OpenAIResponsePromptParam",
"description": "Prompt object with ID, version, and variables."
},
"instructions": {
"type": "string"
},
@@ -9875,6 +9988,10 @@
"type": "string",
"description": "(Optional) ID of the previous response in a conversation"
},
"prompt": {
"$ref": "#/components/schemas/Prompt",
"description": "(Optional) Prompt object with ID, version, and variables"
},
"status": {
"type": "string",
"description": "Current status of the response generation"
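The Prompt schema introduced above is what a response object now exposes through its prompt field. A stored resource instance could look roughly like the sketch below; the template text, placeholder syntax, and identifier are hypothetical, with only the field names and types taken from the schema.

# Illustrative Prompt resource; values and placeholder syntax are assumed, not specified by the schema.
prompt_id: pmpt_example              # real IDs follow the 'pmpt_<48-digit-hash>' format
prompt: "Summarize {{ attachment }} for the {{ audience }} audience."
version: 3                           # integer, starting at 1 and incremented on save
variables:
  - attachment
  - audience
is_default: true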