in rbi/lib/openai/models/beta/thread_create_and_run_params.rbi [232:293]
def self.new(
  assistant_id:,
  instructions: nil,
  max_completion_tokens: nil,
  max_prompt_tokens: nil,
  metadata: nil,
  model: nil,
  parallel_tool_calls: nil,
  response_format: nil,
  temperature: nil,
  thread: nil,
  tool_choice: nil,
  tool_resources: nil,
  tools: nil,
  top_p: nil,
  truncation_strategy: nil,
  request_options: {}
); end
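# Hedged usage sketch (not part of the RBI itself): constructing the params object
# with the keyword arguments typed above. Only assistant_id is required; the other
# values here are illustrative placeholders.
params = OpenAI::Models::Beta::ThreadCreateAndRunParams.new(
  assistant_id: "asst_abc123",        # placeholder assistant ID
  model: "gpt-4o",                    # String or ChatModel symbol, per the return sig below
  temperature: 0.2,                   # nilable Float
  metadata: {environment: "test"},    # Symbol-keyed Hash of Strings
  request_options: {}
)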
sig do
  override
    .returns(
      {
        assistant_id: String,
        instructions: T.nilable(String),
        max_completion_tokens: T.nilable(Integer),
        max_prompt_tokens: T.nilable(Integer),
        metadata: T.nilable(T::Hash[Symbol, String]),
        model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)),
        parallel_tool_calls: T::Boolean,
        response_format: T.nilable(
          T.any(
            Symbol,
            OpenAI::Models::ResponseFormatText,
            OpenAI::Models::ResponseFormatJSONObject,
            OpenAI::Models::ResponseFormatJSONSchema
          )
        ),
        temperature: T.nilable(Float),
        thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread,
        tool_choice: T.nilable(
          T.any(
            OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol,
            OpenAI::Models::Beta::AssistantToolChoice
          )
        ),
        tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources),
        tools: T.nilable(
          T::Array[
            T.any(
              OpenAI::Models::Beta::CodeInterpreterTool,
              OpenAI::Models::Beta::FileSearchTool,
              OpenAI::Models::Beta::FunctionTool
            )
          ]
        ),
        top_p: T.nilable(Float),
        truncation_strategy: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy),
        request_options: OpenAI::RequestOptions
      }
    )
end
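# Hedged sketch, continuing the example above: the override sig types a Symbol-keyed
# hash view of the same fields. The excerpt ends before the method it annotates, so
# the to_hash accessor below is an assumption. Nilable members must be nil-checked,
# while parallel_tool_calls is a plain Boolean.
hash = params.to_hash
puts hash.fetch(:assistant_id)              # always a String
puts hash[:metadata]&.fetch(:environment)   # T.nilable(T::Hash[Symbol, String])
puts hash[:parallel_tool_calls] ? "parallel tool calls enabled" : "disabled"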