in src/nodes/server/text-to-image.ts [34:111]
async runWithInputs(inputs: Inputs) {
  const {
    prompt,
    negative_prompt,
    num_inference_steps,
    guidance_scale,
    seed,
    modelid,
    apikey,
    useCache,
  } = inputs;
  const _modelid = modelid?.trim();
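  // Re-create the Hugging Face Inference client with the user-supplied API key.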
  if (apikey) {
    this.hf = new HfInference(apikey);
  }
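  // Without a prompt there is nothing to generate, so emit an empty result.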
  if (!prompt) {
    this.dispatchEvent(
      new CustomEvent("outputs", { detail: { results: null } })
    );
    return;
  }
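  // Serve the cached output when the inputs are identical to the previous run.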
  if (this.cachedOutput && compareObjects(this.cachedInputs, inputs)) {
    console.info("Using cached output");
    this.dispatchEvent(
      new CustomEvent("outputs", { detail: this.cachedOutput })
    );
    return;
  }
  try {
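    // Request an image from the Hugging Face Inference API (waiting for the
    // model to load if needed); textToImage resolves with the image as a Blob.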
    const imageBlob = await this.hf?.textToImage(
      {
        model: _modelid,
        inputs: prompt,
        parameters: {
          negative_prompt,
          num_inference_steps,
          guidance_scale,
          // @ts-ignore: `seed` is not part of the textToImage parameter typings.
          seed,
        },
      },
      { wait_for_model: true, use_cache: useCache }
    );
    if (!imageBlob) {
      throw new Error("Invalid response");
    }
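    // Convert the Blob to base64 and strip the data-URL prefix so only the raw
    // image data remains.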
    const base64img = (await blobToBase64(imageBlob)) as string;
    const output = base64img.replace(
      /data:image\/png;base64,|data:image\/jpeg;base64,/g,
      ""
    );
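    // Remember this result so identical inputs can be served without another API call.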
    this.cachedOutput = { results: output };
    this.cachedInputs = inputs;
    this.dispatchEvent(
      new CustomEvent("outputs", {
        detail: { results: output },
      })
    );
  } catch (error: any) {
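    // Surface failures from the inference call as an error output instead of throwing.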
    this.dispatchEvent(
      new CustomEvent("outputs", {
        detail: {
          error: {
            title: "Error",
            message: error.message,
          },
        },
      })
    );
  }
}
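
// A minimal sketch of the `blobToBase64` helper assumed above. The real helper
// is defined elsewhere in this repo; the FileReader-based version below only
// illustrates why the result carries a "data:image/...;base64," prefix that the
// code above strips off:
//
//   function blobToBase64(blob: Blob): Promise<string | ArrayBuffer | null> {
//     return new Promise((resolve, reject) => {
//       const reader = new FileReader();
//       reader.onloadend = () => resolve(reader.result);
//       reader.onerror = reject;
//       reader.readAsDataURL(blob);
//     });
//   }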