def __init__()

in elasticapm/base.py [0:0]


    def __init__(self, config=None, **inline) -> None:
        # configure loggers first
        cls = self.__class__
        self.logger = get_logger("%s.%s" % (cls.__module__, cls.__name__))
        self.error_logger = get_logger("elasticapm.errors")

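        # Internal bookkeeping: the pid is tracked so background threads can be
        # restarted after a fork; _thread_managers holds everything that
        # start_threads() will later spin up.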
        self._pid = None
        self._thread_starter_lock = threading.Lock()
        self._thread_managers = {}

        self.tracer = None
        self.processors = []
        self.filter_exception_types_dict = {}
        self._service_info = None
        self._server_version = None
        # setting server_version here is mainly used for testing
        self.server_version = inline.pop("server_version", None)
        self.activation_method = elasticapm._activation_method
        self._extra_metadata = {}

        self.check_python_version()

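        # Merge configuration from the passed-in dict, inline keyword arguments
        # and environment variables; invalid values are logged and sending is
        # disabled rather than raising.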
        config = Config(config, inline_dict=inline)
        if config.errors:
            for msg in config.errors.values():
                self.error_logger.error(msg)
            config.disable_send = True
        if config.service_name == "unknown-python-service":
            self.logger.warning(
                "No custom SERVICE_NAME was set -- using non-descript default 'unknown-python-service'"
            )
        if config.service_name.strip() == "":
            self.logger.error(
                "SERVICE_NAME cannot be whitespace-only. Please set the SERVICE_NAME to a useful identifier."
            )
        self.config = VersionedConfig(config, version=None)

        # Insert the log_record_factory into the logging library
        if not self.config.disable_log_record_factory:
            record_factory = logging.getLogRecordFactory()
            # Only way to know if it's wrapped is to create a log record
            throwaway_record = record_factory(__name__, logging.DEBUG, __file__, 252, "dummy_msg", [], None)
            if not hasattr(throwaway_record, "elasticapm_labels"):
                self.logger.debug("Inserting elasticapm log_record_factory into logging")

                # Late import due to circular imports
                import elasticapm.handlers.logging as elastic_logging

                new_factory = elastic_logging.log_record_factory(record_factory)
                logging.setLogRecordFactory(new_factory)

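        # Build the HTTP transport that ships events to the APM Server intake
        # API; the concrete transport class is configurable, and a custom JSON
        # serializer can be injected via transport_json_serializer.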
        headers = {
            "User-Agent": self.get_user_agent(),
        }

        transport_kwargs = {
            "headers": headers,
            "verify_server_cert": self.config.verify_server_cert,
            "server_cert": self.config.server_cert,
            "server_ca_cert_file": self.config.server_ca_cert_file,
            "timeout": self.config.server_timeout,
            "processors": self.load_processors(),
        }
        if config.transport_json_serializer:
            json_serializer_func = import_string(config.transport_json_serializer)
            transport_kwargs["json_serializer"] = json_serializer_func

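        # urljoin discards the last path segment of the base URL unless it ends
        # with a slash, so normalize server_url before appending the events path.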
        self._api_endpoint_url = urllib.parse.urljoin(
            self.config.server_url if self.config.server_url.endswith("/") else self.config.server_url + "/",
            constants.EVENTS_API_PATH,
        )
        transport_class = import_string(self.config.transport_class)
        self._transport = transport_class(url=self._api_endpoint_url, client=self, **transport_kwargs)
        self.config.transport = self._transport
        self._thread_managers["transport"] = self._transport

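        # Split each configured exception name into class name and module path
        # so captured exceptions can be matched against the filter list later.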
        for exc_to_filter in self.config.filter_exception_types or []:
            exc_to_filter_type = exc_to_filter.split(".")[-1]
            exc_to_filter_module = ".".join(exc_to_filter.split(".")[:-1])
            self.filter_exception_types_dict[exc_to_filter_type] = exc_to_filter_module

        if platform.python_implementation() == "PyPy":
            # PyPy introduces a `_functools.partial.__call__` frame due to our use
            # of `partial` in AbstractInstrumentedModule
            skip_modules = ("elasticapm.", "_functools")
        else:
            skip_modules = ("elasticapm.",)

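        # The Tracer gathers stack frames on demand; the callables below define
        # how frames are collected and how local variables are trimmed before
        # being attached to spans.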
        self.tracer = Tracer(
            frames_collector_func=lambda: list(
                stacks.iter_stack_frames(
                    start_frame=inspect.currentframe(), skip_top_modules=skip_modules, config=self.config
                )
            ),
            frames_processing_func=lambda frames: self._get_stack_info_for_trace(
                frames,
                library_frame_context_lines=self.config.source_lines_span_library_frames,
                in_app_frame_context_lines=self.config.source_lines_span_app_frames,
                with_locals=self.config.collect_local_variables in ("all", "transactions"),
                locals_processor_func=lambda local_var: varmap(
                    lambda k, v: shorten(
                        v,
                        list_length=self.config.local_var_list_max_length,
                        string_length=self.config.local_var_max_length,
                        dict_length=self.config.local_var_dict_max_length,
                    ),
                    local_var,
                ),
            ),
            queue_func=self.queue,
            config=self.config,
            agent=self,
        )
        self.include_paths_re = stacks.get_path_regex(self.config.include_paths) if self.config.include_paths else None
        self.exclude_paths_re = stacks.get_path_regex(self.config.exclude_paths) if self.config.exclude_paths else None
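        # Register the configured metric sets; breakdown and Prometheus metrics
        # are opt-in via their own settings, and the metrics collection thread
        # only runs when metrics_interval is non-zero.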
        self.metrics = MetricsRegistry(self)
        for path in self.config.metrics_sets:
            self.metrics.register(path)
        if self.config.breakdown_metrics:
            self.metrics.register("elasticapm.metrics.sets.breakdown.BreakdownMetricSet")
        if self.config.prometheus_metrics:
            self.metrics.register("elasticapm.metrics.sets.prometheus.PrometheusMetrics")
        if self.config.metrics_interval:
            self._thread_managers["metrics"] = self.metrics
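        # Final wiring: flush on interpreter exit, optionally poll central
        # config, install the wrapping excepthook, and start the background
        # threads if the agent is enabled.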
        compat.atexit_register(self.close)
        if self.config.central_config:
            self._thread_managers["config"] = self.config
        else:
            self._config_updater = None
        if self.config.use_elastic_excepthook:
            self.original_excepthook = sys.excepthook
            sys.excepthook = self._excepthook
        if config.enabled:
            self.start_threads()

        # Save this Client object as the global CLIENT_SINGLETON
        set_client(self)
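
For reference, a minimal usage sketch (the values are illustrative; as the signature above shows, configuration can be passed as a dict or as inline keyword arguments):

    import elasticapm

    # Constructing a Client runs the __init__ above: configuration is merged
    # from inline kwargs and the environment, the transport and tracer are
    # built, metric sets are registered, and the instance is stored as the
    # global client via set_client().
    client = elasticapm.Client(
        service_name="my-service",           # avoids the 'unknown-python-service' warning
        server_url="http://localhost:8200",  # APM Server intake endpoint
    )

    # Shut down background threads and flush queued events when finished.
    client.close()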