def _process_large_attributes()

in Runtime_env/app/utils/tracing.py


    def _process_large_attributes(self, span_dict: dict, span_id: str) -> dict:
        """
        Process large attribute values by storing them in GCS if their serialized
        size exceeds Google Cloud Logging's 256 KB log entry limit.

        :param span_dict: The span data dictionary
        :param span_id: The span ID
        :return: The updated span dictionary
        """
        attributes = span_dict["attributes"]
        if len(json.dumps(attributes).encode()) > 255 * 1024:  # 255 KB, just under Cloud Logging's 256 KB entry limit
            # Keep the traceloop association properties inline and offload
            # everything else to GCS as the large payload
            attributes_payload = {
                k: v
                for k, v in attributes.items()
                if "traceloop.association.properties" not in k
            }
            attributes_retain = {
                k: v
                for k, v in attributes.items()
                if "traceloop.association.properties" in k
            }

            # Store the large payload in GCS; retain both the gs:// URI and a
            # browsable HTTPS URL pointing at the same object
            gcs_uri = self.store_in_gcs(json.dumps(attributes_payload), span_id)
            attributes_retain["uri_payload"] = gcs_uri
            attributes_retain["url_payload"] = (
                f"https://storage.mtls.cloud.google.com/"
                f"{self.bucket_name}/spans/{span_id}.json"
            )

            span_dict["attributes"] = attributes_retain
            logging.info(
                "Span attributes exceed 255 KB, storing them in GCS "
                "to avoid oversized log entry errors"
            )

        return span_dict
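
The store_in_gcs helper is called above but not shown in this snippet (which also assumes module-level import json and import logging). Below is a minimal sketch of what that helper might look like, assuming the google-cloud-storage client library and the spans/{span_id}.json object path inferred from the url_payload built above; the actual implementation in tracing.py may differ.

    from google.cloud import storage

    def store_in_gcs(self, data: str, span_id: str) -> str:
        """Upload serialized span attributes to GCS and return the gs:// URI."""
        # Hypothetical sketch; the real helper in tracing.py may differ.
        client = storage.Client()
        blob = client.bucket(self.bucket_name).blob(f"spans/{span_id}.json")
        blob.upload_from_string(data, content_type="application/json")
        return f"gs://{self.bucket_name}/spans/{span_id}.json"

Writing the object under the same spans/{span_id}.json path keeps uri_payload and url_payload pointing at the same object, so the offloaded attributes can be fetched either through the client library or a browser.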