self.new

in rbi/lib/openai/models/responses/response_create_params.rbi [316:395]

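        # Builds the parameter set for a create-response request: `input` and
        # `model` are the only required keywords; every other field is optional
        # and defaults to nil, while `request_options` defaults to an empty hash.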
        def self.new(
          input:,
          model:,
          include: nil,
          instructions: nil,
          max_output_tokens: nil,
          metadata: nil,
          parallel_tool_calls: nil,
          previous_response_id: nil,
          reasoning: nil,
          service_tier: nil,
          store: nil,
          temperature: nil,
          text: nil,
          tool_choice: nil,
          tools: nil,
          top_p: nil,
          truncation: nil,
          user: nil,
          request_options: {}
        ); end
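
        # The override sig below declares the hash shape these params serialize to;
        # in the full RBI it presumably annotates the `#to_hash` method, whose
        # definition falls just past the excerpted line range.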
        sig do
          override
            .returns(
              {
                input: T.any(
                  String,
                  T::Array[
                    T.any(
                      OpenAI::Models::Responses::EasyInputMessage,
                      OpenAI::Models::Responses::ResponseInputItem::Message,
                      OpenAI::Models::Responses::ResponseOutputMessage,
                      OpenAI::Models::Responses::ResponseFileSearchToolCall,
                      OpenAI::Models::Responses::ResponseComputerToolCall,
                      OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput,
                      OpenAI::Models::Responses::ResponseFunctionWebSearch,
                      OpenAI::Models::Responses::ResponseFunctionToolCall,
                      OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput,
                      OpenAI::Models::Responses::ResponseReasoningItem,
                      OpenAI::Models::Responses::ResponseInputItem::ItemReference
                    )
                  ]
                ),
                model: T.any(
                  String,
                  OpenAI::Models::ChatModel::OrSymbol,
                  OpenAI::Models::ResponsesModel::ResponsesOnlyModel::OrSymbol
                ),
                include: T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]),
                instructions: T.nilable(String),
                max_output_tokens: T.nilable(Integer),
                metadata: T.nilable(T::Hash[Symbol, String]),
                parallel_tool_calls: T.nilable(T::Boolean),
                previous_response_id: T.nilable(String),
                reasoning: T.nilable(OpenAI::Models::Reasoning),
                service_tier: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::ServiceTier::OrSymbol),
                store: T.nilable(T::Boolean),
                temperature: T.nilable(Float),
                text: OpenAI::Models::Responses::ResponseTextConfig,
                tool_choice: T.any(
                  OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol,
                  OpenAI::Models::Responses::ToolChoiceTypes,
                  OpenAI::Models::Responses::ToolChoiceFunction
                ),
                tools: T::Array[
                  T.any(
                    OpenAI::Models::Responses::FileSearchTool,
                    OpenAI::Models::Responses::FunctionTool,
                    OpenAI::Models::Responses::ComputerTool,
                    OpenAI::Models::Responses::WebSearchTool
                  )
                ],
                top_p: T.nilable(Float),
                truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol),
                user: String,
                request_options: OpenAI::RequestOptions
              }
            )
        end
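
For orientation, here is a minimal, purely illustrative sketch of constructing these params with the keywords visible in the signature above. The model string, input text, and temperature value are placeholders, `require "openai"` assumes the openai gem is installed, and the trailing `to_hash` call assumes the override sig above annotates that method, as it does elsewhere in the generated RBIs.

    # Illustrative sketch only; values are placeholders, not taken from the excerpt.
    require "openai"

    params = OpenAI::Models::Responses::ResponseCreateParams.new(
      model: "gpt-4o",                                # a plain String is accepted alongside the model enums
      input: "Write a haiku about type signatures.",  # a String or an array of input items
      temperature: 0.2                                # optional; nil when omitted
    )

    params.to_hash # returns a hash shaped like the override sig above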