def _get_attributes_from_wrapper()

in instrumentation/elastic-opentelemetry-instrumentation-openai/src/opentelemetry/instrumentation/openai/helpers.py [0:0]


def _get_attributes_from_wrapper(instance, kwargs) -> Attributes:
    """Build span attributes for an OpenAI chat request.

    Maps the request keyword arguments (and client info taken from the
    wrapped *instance*) onto GenAI semantic-convention attributes.
    Values equal to ``None`` or the OpenAI ``NOT_GIVEN`` sentinel are
    treated as unset and omitted.

    :param instance: the wrapped OpenAI resource object; its ``_client``
        attribute (if any) supplies server/base-url attributes.
    :param kwargs: the keyword arguments of the intercepted API call.
    :return: a dict of span attributes.
    """
    # we import this here to avoid races with other instrumentations
    try:
        # available since 1.13.4
        from openai import NotGiven
    except ImportError:
        NotGiven = None

    def _is_set(value):
        # A value counts as "set" only when it is neither None nor the
        # SDK's NOT_GIVEN sentinel (older SDKs lack NotGiven entirely).
        if NotGiven is not None:
            return value is not None and not isinstance(value, NotGiven)
        return value is not None

    span_attributes = {
        GEN_AI_OPERATION_NAME: "chat",
        GEN_AI_SYSTEM: "openai",
    }

    if _is_set(request_model := kwargs.get("model")):
        span_attributes[GEN_AI_REQUEST_MODEL] = request_model

    if client := getattr(instance, "_client", None):
        span_attributes.update(_attributes_from_client(client))

    # n == 1 is the default, so only record an explicit non-default value.
    if _is_set(choice_count := kwargs.get("n")) and choice_count != 1:
        span_attributes[GEN_AI_REQUEST_CHOICE_COUNT] = choice_count
    if _is_set(frequency_penalty := kwargs.get("frequency_penalty")):
        span_attributes[GEN_AI_REQUEST_FREQUENCY_PENALTY] = frequency_penalty
    # Prefer the newer `max_completion_tokens` but fall back to the legacy
    # `max_tokens`. Check each value with _is_set independently: a plain
    # dict-default fallback would miss the case where the caller passes
    # max_completion_tokens=NOT_GIVEN (or None) explicitly — the key is
    # present, so .get() would return the sentinel and the legacy value
    # would be silently dropped.
    max_tokens = kwargs.get("max_completion_tokens")
    if not _is_set(max_tokens):
        max_tokens = kwargs.get("max_tokens")
    if _is_set(max_tokens):
        span_attributes[GEN_AI_REQUEST_MAX_TOKENS] = max_tokens
    if _is_set(presence_penalty := kwargs.get("presence_penalty")):
        span_attributes[GEN_AI_REQUEST_PRESENCE_PENALTY] = presence_penalty
    if _is_set(temperature := kwargs.get("temperature")):
        span_attributes[GEN_AI_REQUEST_TEMPERATURE] = temperature
    if _is_set(top_p := kwargs.get("top_p")):
        span_attributes[GEN_AI_REQUEST_TOP_P] = top_p
    if _is_set(stop_sequences := kwargs.get("stop")):
        # The API accepts a single string or a list; normalize to a list
        # so the attribute always has a sequence value.
        if isinstance(stop_sequences, str):
            stop_sequences = [stop_sequences]
        span_attributes[GEN_AI_REQUEST_STOP_SEQUENCES] = stop_sequences
    if _is_set(seed := kwargs.get("seed")):
        span_attributes[GEN_AI_REQUEST_SEED] = seed
    if _is_set(service_tier := kwargs.get("service_tier")):
        span_attributes[GEN_AI_OPENAI_REQUEST_SERVICE_TIER] = service_tier
    if _is_set(response_format := kwargs.get("response_format")):
        # response_format may be string or object with a string in the `type` key
        if isinstance(response_format, Mapping):
            if _is_set(response_format_type := response_format.get("type")):
                # Both JSON modes collapse to the semconv "json" value.
                if response_format_type in ("json_object", "json_schema"):
                    span_attributes[GEN_AI_OUTPUT_TYPE] = "json"
                else:
                    span_attributes[GEN_AI_OUTPUT_TYPE] = response_format_type
        elif isinstance(response_format, str):
            span_attributes[GEN_AI_OUTPUT_TYPE] = response_format
        else:
            # Assume structured output lazily parsed to a schema via type_to_response_format_param or similar.
            # e.g. pydantic._internal._model_construction.ModelMetaclass
            span_attributes[GEN_AI_OUTPUT_TYPE] = "json"

    return span_attributes