def factory_wrapper()

in torchx/pipelines/kfp/adapter.py [0:0]


    def factory_wrapper(*args: object, **kwargs: object) -> dsl.ContainerOp:
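        # Build the base ContainerOp from the generated component factory and
        # grab its container so resources, ports, and mounts can be set below.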
        c = component_factory(*args, **kwargs)
        container = c.container

        if ui_metadata is not None:
            # The UI metadata file is written by the sidecar, so the container
            # and the sidecar must share the same tmp directory for the output
            # to appear in the original container.
            c.add_volume(V1Volume(name="tmp", empty_dir=V1EmptyDirVolumeSource()))
            container.add_volume_mount(
                V1VolumeMount(
                    name="tmp",
                    mount_path="/tmp/",
                )
            )
            c.output_artifact_paths["mlpipeline-ui-metadata"] = METADATA_FILE
            c.add_sidecar(_ui_metadata_sidecar(ui_metadata))

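        # Convert TorchX resource requirements into Kubernetes resource strings:
        # CPUs become millicores (e.g. cpu=1.5 -> "1500m") and memory is given
        # in megabytes (e.g. memMB=4096 -> "4096M"); request and limit are set
        # to the same value.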
        cpu = resources.cpu
        if cpu >= 0:
            cpu_str = f"{int(cpu*1000)}m"
            container.set_cpu_request(cpu_str)
            container.set_cpu_limit(cpu_str)
        mem = resources.memMB
        if mem >= 0:
            mem_str = f"{int(mem)}M"
            container.set_memory_request(mem_str)
            container.set_memory_limit(mem_str)
        gpu = resources.gpu
        if gpu > 0:
            container.set_gpu_limit(str(gpu))

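        # Expose every named port from the role's port_map on the container,
        # e.g. an entry {"c10d": 29500} becomes
        # V1ContainerPort(name="c10d", container_port=29500).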
        for name, port in role_spec.port_map.items():
            container.add_port(
                V1ContainerPort(
                    name=name,
                    container_port=port,
                ),
            )

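        # Attach TorchX pod labels computed from the app and role spec.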
        c.pod_labels.update(pod_labels(app, 0, role_spec, 0))

        return c
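
A minimal sketch of how a factory produced this way is typically consumed, assuming torchx's public KFP (v1) adapter entry point `container_from_app` and an illustrative single-role echo `AppDef`; the app definition and pipeline names below are placeholders, not part of the source above.

    from kfp import dsl
    from torchx import specs
    from torchx.pipelines.kfp.adapter import container_from_app

    # Illustrative single-role app; its Resource values drive the cpu/mem/gpu
    # settings applied by factory_wrapper above.
    app = specs.AppDef(
        name="echo",
        roles=[
            specs.Role(
                name="echo",
                image="alpine",
                entrypoint="echo",
                args=["hello torchx"],
                resource=specs.Resource(cpu=1, gpu=0, memMB=1024),
            )
        ],
    )

    @dsl.pipeline(name="echo-pipeline", description="Run the echo app via TorchX")
    def echo_pipeline() -> None:
        # Returns a dsl.ContainerOp configured with the role's resources,
        # ports, and pod labels.
        echo_op = container_from_app(app)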