def prepare_batch_payload()

in cli/foundation-models/system/inference/text-to-image/utils/prepare_data_inpainting.py [0:0]


import base64
import glob
import os
from pathlib import Path

import pandas as pd

# Note: read_image is a helper defined elsewhere in this script (a sketch is shown after the function).


def prepare_batch_payload(payload_path: str) -> None:
    """Prepare payload for online deployment.

    :param payload_path: Path to payload csv file.
    :type payload_path: str
    :return: None
    """

    base_image1 = "inpainting_data/images/dog_on_bench.png"
    mask_image1 = "inpainting_data/masks/dog_on_bench.png"
    base_image2 = "inpainting_data/images/teapot.png"
    mask_image2 = "inpainting_data/masks/teapot.png"

    os.makedirs(payload_path, exist_ok=True)

    input_data = {
        "columns": ["image", "mask_image", "prompt"],
        "data": [
            {
                "image": base64.encodebytes(read_image(base_image1)).decode("utf-8"),
                "mask_image": base64.encodebytes(read_image(mask_image1)).decode(
                    "utf-8"
                ),
                "prompt": "A yellow cat, high resolution, sitting on a park bench",
            },
            {
                "image": base64.encodebytes(read_image(base_image2)).decode("utf-8"),
                "mask_image": base64.encodebytes(read_image(mask_image2)).decode(
                    "utf-8"
                ),
                "prompt": "A small flower featuring a blend of pink and purple colors.",
            },
        ],
    }
    pd.DataFrame(**input_data).to_csv(
        os.path.join(payload_path, "input1.csv"), index=False
    )

    input_data = {
        "columns": ["image", "mask_image", "prompt"],
        "data": [
            {
                "image": base64.encodebytes(read_image(base_image1)).decode("utf-8"),
                "mask_image": base64.encodebytes(read_image(mask_image1)).decode(
                    "utf-8"
                ),
                "prompt": "Pikachu, cinematic, digital art, sitting on bench",
            },
            {
                "image": base64.encodebytes(read_image(base_image2)).decode("utf-8"),
                "mask_image": base64.encodebytes(read_image(mask_image2)).decode(
                    "utf-8"
                ),
                "prompt": "A woman with red hair in the style of Tamara de Lempicka.",
            },
        ],
    }
    pd.DataFrame(**input_data).to_csv(
        os.path.join(payload_path, "input2.csv"), index=False
    )

    # Use glob to get a list of CSV files in the folder
    csv_files = glob.glob(os.path.join(payload_path, "*.csv"))

    # Read all CSV files into a single DataFrame using pd.concat
    batch_df = pd.concat((pd.read_csv(file) for file in csv_files), ignore_index=True)

    # Folder where the split batch input CSV files will be saved
    processed_dataset_parent_dir = os.path.join(payload_path, "processed_batch_data")
    os.makedirs(processed_dataset_parent_dir, exist_ok=True)
    batch_input_file = "batch_input.csv"

    # Split the combined DataFrame into files of batch_size_per_predict rows each
    batch_size_per_predict = 2
    for i in range(0, len(batch_df), batch_size_per_predict):
        j = i + batch_size_per_predict
        # Write without the index column so the split files match the inputs created above
        batch_df[i:j].to_csv(
            os.path.join(processed_dataset_parent_dir, str(i) + batch_input_file),
            index=False,
        )

    # Check out the first and last file name created
    input_paths = sorted(
        Path(processed_dataset_parent_dir).iterdir(), key=os.path.getmtime
    )
    input_files = [os.path.basename(path) for path in input_paths]
    print(f"{input_files[0]} to {str(i)}{batch_input_file}.")