in rbi/lib/openai/resources/completions.rbi [32:170]
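# Non-streaming method definition: only `model:` and `prompt:` are required
# keywords; every other sampling parameter is optional, and `stream` defaults
# to false.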
def create(
  model:,
  prompt:,
  best_of: nil,
  echo: nil,
  frequency_penalty: nil,
  logit_bias: nil,
  logprobs: nil,
  max_tokens: nil,
  n: nil,
  presence_penalty: nil,
  seed: nil,
  stop: nil,
  stream_options: nil,
  suffix: nil,
  temperature: nil,
  top_p: nil,
  user: nil,
  stream: false,
  request_options: {}
); end
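
# Signature for the streaming overload: `stream` is typed `T.noreturn`, so
# Sorbet rejects any explicit `stream:` argument here, and the call is typed
# to return an OpenAI::Internal::Stream[OpenAI::Models::Completion] (a stream
# of completion chunks) rather than a single response object.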
sig do
  params(
    model: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol),
    prompt: T.nilable(T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]])),
    best_of: T.nilable(Integer),
    echo: T.nilable(T::Boolean),
    frequency_penalty: T.nilable(Float),
    logit_bias: T.nilable(T::Hash[Symbol, Integer]),
    logprobs: T.nilable(Integer),
    max_tokens: T.nilable(Integer),
    n: T.nilable(Integer),
    presence_penalty: T.nilable(Float),
    seed: T.nilable(Integer),
    stop: T.nilable(T.any(String, T::Array[String])),
    stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::AnyHash)),
    suffix: T.nilable(String),
    temperature: T.nilable(Float),
    top_p: T.nilable(Float),
    user: String,
    stream: T.noreturn,
    request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))
  )
    .returns(OpenAI::Internal::Stream[OpenAI::Models::Completion])
end
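
# Usage sketch (not part of the .rbi file), showing how the typed parameters
# above might be passed. Assumptions not taken from this excerpt: the client
# is built with OpenAI::Client.new(api_key:), the resource is reached via
# client.completions, the streaming call is hypothetically named
# `create_streaming`, and the returned stream is iterated with #each.
require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"]) # assumed constructor

# Non-streaming call, matching `def create` above: `stream` stays at its
# default of false, and only `model:` and `prompt:` are required.
completion = client.completions.create(
  model: "gpt-3.5-turbo-instruct",
  prompt: "Write a haiku about type signatures.",
  max_tokens: 64,
  temperature: 0.7
)
puts completion.choices.first.text

# Streaming call, matching the sig above that returns
# OpenAI::Internal::Stream[OpenAI::Models::Completion]. The method name and
# the Enumerable-style iteration are assumptions for illustration only.
stream = client.completions.create_streaming(
  model: "gpt-3.5-turbo-instruct",
  prompt: "Write a haiku about type signatures.",
  max_tokens: 64
)
stream.each do |chunk|
  print chunk.choices.first.text
end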