create

in rbi/lib/openai/resources/beta/threads/runs.rbi [66:265]


          def create(
            thread_id,
            assistant_id:,
            include: nil,
            additional_instructions: nil,
            additional_messages: nil,
            instructions: nil,
            max_completion_tokens: nil,
            max_prompt_tokens: nil,
            metadata: nil,
            model: nil,
            parallel_tool_calls: nil,
            reasoning_effort: nil,
            response_format: nil,
            temperature: nil,
            tool_choice: nil,
            tools: nil,
            top_p: nil,
            truncation_strategy: nil,
            stream: false,
            request_options: {}
          ); end
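
A minimal usage sketch for the blocking form above (stream: false). The OpenAI::Client construction, the client.beta.threads.runs accessor, and the placeholder IDs are assumptions; per the parameter list, thread_id is positional and assistant_id: is the only required keyword.

  require "openai"

  # Assumption: standard client construction for this SDK.
  client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

  run = client.beta.threads.runs.create(
    "thread_abc123",                       # placeholder thread ID (positional)
    assistant_id: "asst_abc123",           # placeholder assistant ID (required keyword)
    instructions: "Address the user as Jane Doe.",
    metadata: { source: "docs-example" }   # Symbol keys, String values, per the signature below
  )

  puts run.id  # assumption: the returned Run model exposes an `id` field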
          
          sig do
            params(
              thread_id: String,
              assistant_id: String,
              include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol],
              additional_instructions: T.nilable(String),
              additional_messages: T.nilable(
                T::Array[T.any(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage, OpenAI::Internal::AnyHash)]
              ),
              instructions: T.nilable(String),
              max_completion_tokens: T.nilable(Integer),
              max_prompt_tokens: T.nilable(Integer),
              metadata: T.nilable(T::Hash[Symbol, String]),
              model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)),
              parallel_tool_calls: T::Boolean,
              reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol),
              response_format: T.nilable(
                T.any(
                  Symbol,
                  OpenAI::Models::ResponseFormatText,
                  OpenAI::Internal::AnyHash,
                  OpenAI::Models::ResponseFormatJSONObject,
                  OpenAI::Models::ResponseFormatJSONSchema
                )
              ),
              temperature: T.nilable(Float),
              tool_choice: T.nilable(
                T.any(
                  OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol,
                  OpenAI::Models::Beta::AssistantToolChoice,
                  OpenAI::Internal::AnyHash
                )
              ),
              tools: T.nilable(
                T::Array[
                  T.any(
                    OpenAI::Models::Beta::CodeInterpreterTool,
                    OpenAI::Internal::AnyHash,
                    OpenAI::Models::Beta::FileSearchTool,
                    OpenAI::Models::Beta::FunctionTool
                  )
                ]
              ),
              top_p: T.nilable(Float),
              truncation_strategy: T.nilable(
                T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, OpenAI::Internal::AnyHash)
              ),
              stream: T.noreturn,
              request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))
            )
              .returns(
                OpenAI::Internal::Stream[
                  T.any(
                    OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated,
                    OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated,
                    OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued,
                    OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress,
                    OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction,
                    OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted,
                    OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete,
                    OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed,
                    OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling,
                    OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled,
                    OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired,
                    OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated,
                    OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress,
                    OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta,
                    OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted,
                    OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed,
                    OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled,
                    OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired,
                    OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated,
                    OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress,
                    OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta,
                    OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted,
                    OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete,
                    OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent
                  )
                ]
              )
          end
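
The signature above accepts stream: T.noreturn and returns an OpenAI::Internal::Stream of AssistantStreamEvent variants, i.e. it belongs to the streaming counterpart of create rather than to the blocking form. A sketch of consuming such a stream, reusing the client from the previous example; the method name create_stream_raw and the enumerability of the returned stream are assumptions, since the streaming method's definition lies outside this excerpt.

  # Assumption: the streaming counterpart is named `create_stream_raw`; only a
  # streaming signature, not the method definition, appears in this excerpt.
  stream = client.beta.threads.runs.create_stream_raw(
    "thread_abc123",
    assistant_id: "asst_abc123"
  )

  # Each yielded object is one of the AssistantStreamEvent variants listed in
  # the return type above.
  stream.each do |event|
    case event
    when OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta
      print "."              # incremental message content
    when OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted
      puts "\nrun completed"
    when OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent
      warn "stream error"
    end
  end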