# create
#
# Extracted from rbi/lib/openai/resources/chat/completions.rbi (lines 88-361).

        # RBI stub for the non-streaming chat-completion request; the runtime
        # implementation lives in the corresponding .rb file. Only `messages:`
        # and `model:` are required — every other keyword defaults to nil,
        # except `stream` (false: this is the non-streaming overload) and
        # `request_options` (empty hash).
        #
        # NOTE(review): per-parameter doc comments were stripped from this
        # extract — consult the upstream rbi for parameter descriptions.
        def create(
          messages:,
          model:,
          audio: nil,
          frequency_penalty: nil,
          function_call: nil,
          functions: nil,
          logit_bias: nil,
          logprobs: nil,
          max_completion_tokens: nil,
          max_tokens: nil,
          metadata: nil,
          modalities: nil,
          n: nil,
          parallel_tool_calls: nil,
          prediction: nil,
          presence_penalty: nil,
          reasoning_effort: nil,
          response_format: nil,
          seed: nil,
          service_tier: nil,
          stop: nil,
          store: nil,
          stream_options: nil,
          temperature: nil,
          tool_choice: nil,
          tools: nil,
          top_logprobs: nil,
          top_p: nil,
          user: nil,
          web_search_options: nil,
          stream: false,
          request_options: {}
        ); end
        
        
        
        
        
        
        
        
        
        
        
        
        
        
        
        
        
        
        
        # Sorbet signature for (presumably) the streaming variant of `create`:
        # it accepts the same request parameters, types `stream` as
        # `T.noreturn` (so callers of this overload must not pass it), and
        # returns a Stream of ChatCompletionChunk values.
        # NOTE(review): the `def` this sig annotates falls outside this
        # extract — confirm against the full rbi file.
        sig do
          params(
            messages: T::Array[
              T.any(
                OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam,
                OpenAI::Internal::AnyHash,
                OpenAI::Models::Chat::ChatCompletionSystemMessageParam,
                OpenAI::Models::Chat::ChatCompletionUserMessageParam,
                OpenAI::Models::Chat::ChatCompletionAssistantMessageParam,
                OpenAI::Models::Chat::ChatCompletionToolMessageParam,
                OpenAI::Models::Chat::ChatCompletionFunctionMessageParam
              )
            ],
            model: T.any(String, OpenAI::Models::ChatModel::OrSymbol),
            audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Internal::AnyHash)),
            frequency_penalty: T.nilable(Float),
            function_call: T.any(
              OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol,
              OpenAI::Models::Chat::ChatCompletionFunctionCallOption,
              OpenAI::Internal::AnyHash
            ),
            functions: T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Internal::AnyHash)],
            logit_bias: T.nilable(T::Hash[Symbol, Integer]),
            logprobs: T.nilable(T::Boolean),
            max_completion_tokens: T.nilable(Integer),
            max_tokens: T.nilable(Integer),
            metadata: T.nilable(T::Hash[Symbol, String]),
            modalities: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol]),
            n: T.nilable(Integer),
            parallel_tool_calls: T::Boolean,
            prediction: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionPredictionContent, OpenAI::Internal::AnyHash)),
            presence_penalty: T.nilable(Float),
            reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol),
            response_format: T.any(
              OpenAI::Models::ResponseFormatText,
              OpenAI::Internal::AnyHash,
              OpenAI::Models::ResponseFormatJSONSchema,
              OpenAI::Models::ResponseFormatJSONObject
            ),
            seed: T.nilable(Integer),
            service_tier: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol),
            stop: T.nilable(T.any(String, T::Array[String])),
            store: T.nilable(T::Boolean),
            stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::AnyHash)),
            temperature: T.nilable(Float),
            tool_choice: T.any(
              OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol,
              OpenAI::Models::Chat::ChatCompletionNamedToolChoice,
              OpenAI::Internal::AnyHash
            ),
            tools: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Internal::AnyHash)],
            top_logprobs: T.nilable(Integer),
            top_p: T.nilable(Float),
            user: String,
            web_search_options: T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Internal::AnyHash),
            # T.noreturn: this overload is selected when `stream` is NOT passed.
            stream: T.noreturn,
            request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))
          )
            .returns(OpenAI::Internal::Stream[OpenAI::Models::Chat::ChatCompletionChunk])
        end