in source/image-handler/image-handler.ts [76:248]
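/**
 * Applies the requested edits to the original image.
 * @param originalImage The original sharp image.
 * @param edits The edits to be applied to the original image.
 * @returns The modified sharp image.
 */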
public async applyEdits(originalImage: sharp.Sharp, edits: ImageEdits): Promise<sharp.Sharp> {
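// Normalize resize edits up front: default to 'inside' fit when no resize is requested, otherwise round any numeric dimensions.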
if (edits.resize === undefined) {
edits.resize = {};
edits.resize.fit = ImageFitTypes.INSIDE;
} else {
if (edits.resize.width) edits.resize.width = Math.round(Number(edits.resize.width));
if (edits.resize.height) edits.resize.height = Math.round(Number(edits.resize.height));
}
// Apply the image edits
for (const edit in edits) {
switch (edit) {
case 'overlayWith': {
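// Overlay placement must be computed against the post-resize dimensions, so when a resize edit is present the metadata is re-read from a resized copy of the image.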
let imageMetadata: sharp.Metadata = await originalImage.metadata();
if (edits.resize) {
const imageBuffer = await originalImage.toBuffer();
const resizeOptions: ResizeOptions = edits.resize;
imageMetadata = await sharp(imageBuffer).resize(resizeOptions).metadata();
}
const { bucket, key, wRatio, hRatio, alpha, options } = edits.overlayWith;
const overlay = await this.getOverlayImage(bucket, key, wRatio, hRatio, alpha, imageMetadata);
const overlayMetadata = await sharp(overlay).metadata();
const overlayOption: OverlayOptions = { ...options, input: overlay };
if (options) {
const { left: leftOption, top: topOption } = options;
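// A trailing 'p' marks the offset as a percentage of the base image dimension; negative values anchor the overlay from the opposite edge.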
const getSize = (editSize: string | undefined, imageSize: number, overlaySize: number): number => {
let resultSize = NaN;
if (editSize !== undefined) {
if (editSize.endsWith('p')) {
resultSize = parseInt(editSize.replace('p', ''));
resultSize = Math.floor(resultSize < 0 ? imageSize + (imageSize * resultSize) / 100 - overlaySize : (imageSize * resultSize) / 100);
} else {
resultSize = parseInt(editSize);
if (resultSize < 0) {
resultSize = imageSize + resultSize - overlaySize;
}
}
}
return resultSize;
};
const left = getSize(leftOption, imageMetadata.width, overlayMetadata.width);
if (!isNaN(left)) overlayOption.left = left;
const top = getSize(topOption, imageMetadata.height, overlayMetadata.height);
if (!isNaN(top)) overlayOption.top = top;
}
originalImage.composite([overlayOption]);
break;
}
case 'smartCrop': {
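// Smart crop centers on a face detected by Amazon Rekognition, selected by faceIndex and expanded by padding.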
// smart crop can be boolean or object
if (edits.smartCrop === true || typeof edits.smartCrop === 'object') {
const { faceIndex, padding } =
typeof edits.smartCrop === 'object'
? edits.smartCrop
: {
faceIndex: undefined,
padding: undefined
};
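// Rekognition requires JPEG or PNG input, so convert first and keep the original format so it can be restored afterwards.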
const { imageBuffer, format } = await this.getRekognitionCompatibleImage(originalImage);
const boundingBox = await this.getBoundingBox(imageBuffer.data, faceIndex ?? 0);
const cropArea = this.getCropArea(boundingBox, padding ?? 0, imageBuffer.info);
try {
originalImage.extract(cropArea);
// convert image back to previous format
if (format !== imageBuffer.info.format) {
originalImage.toFormat(format);
}
} catch (error) {
throw new ImageHandlerError(
StatusCodes.BAD_REQUEST,
'SmartCrop::PaddingOutOfBounds',
'The padding value you provided exceeds the boundaries of the original image. Please try choosing a smaller value or applying padding via Sharp for greater specificity.'
);
}
}
break;
}
case 'roundCrop': {
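// Round crop masks the image to an ellipse centered at (left, top) with radii (rx, ry).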
// round crop can be boolean or object
if (edits.roundCrop === true || typeof edits.roundCrop === 'object') {
const { top, left, rx, ry } =
typeof edits.roundCrop === 'object'
? edits.roundCrop
: {
top: undefined,
left: undefined,
rx: undefined,
ry: undefined
};
const imageBuffer = await originalImage.toBuffer({ resolveWithObject: true });
const width = imageBuffer.info.width;
const height = imageBuffer.info.height;
// fall back to centered defaults when a parameter is missing, zero, or negative
const radiusX = rx && rx >= 0 ? rx : Math.min(width, height) / 2;
const radiusY = ry && ry >= 0 ? ry : Math.min(width, height) / 2;
const topOffset = top && top >= 0 ? top : height / 2;
const leftOffset = left && left >= 0 ? left : width / 2;
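// Mask the image with an SVG ellipse: the 'dest-in' blend keeps only the pixels inside the ellipse, and trim() removes the uniform border left behind.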
const ellipse = Buffer.from(`<svg viewBox="0 0 ${width} ${height}"> <ellipse cx="${leftOffset}" cy="${topOffset}" rx="${radiusX}" ry="${radiusY}" /></svg>`);
const overlayOptions: OverlayOptions[] = [{ input: ellipse, blend: 'dest-in' }];
const data = await originalImage.composite(overlayOptions).toBuffer();
originalImage = sharp(data).withMetadata().trim();
}
break;
}
case 'contentModeration': {
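// Blur the image when Amazon Rekognition flags inappropriate content, optionally restricted to specific moderation labels.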
// content moderation can be boolean or object
if (edits.contentModeration === true || typeof edits.contentModeration === 'object') {
const { minConfidence, blur, moderationLabels } =
typeof edits.contentModeration === 'object'
? edits.contentModeration
: {
minConfidence: undefined,
blur: undefined,
moderationLabels: undefined
};
const { imageBuffer, format } = await this.getRekognitionCompatibleImage(originalImage);
const inappropriateContent = await this.detectInappropriateContent(imageBuffer.data, minConfidence);
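// sharp accepts blur sigma values between 0.3 and 1000; default to a sigma of 50 when none is provided.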
const blurValue = blur !== undefined ? Math.ceil(blur) : 50;
if (blurValue >= 0.3 && blurValue <= 1000) {
if (moderationLabels) {
for (const moderationLabel of inappropriateContent.ModerationLabels) {
if (moderationLabels.includes(moderationLabel.Name)) {
originalImage.blur(blurValue);
break;
}
}
} else if (inappropriateContent.ModerationLabels.length) {
originalImage.blur(blurValue);
}
}
// convert image back to previous format
if (format !== imageBuffer.info.format) {
originalImage.toFormat(format);
}
}
break;
}
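// Crop to an exact caller-supplied region; sharp rejects regions that fall outside the image.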
case 'crop': {
try {
originalImage.extract(edits.crop);
} catch (error) {
throw new ImageHandlerError(
StatusCodes.BAD_REQUEST,
'Crop::AreaOutOfBounds',
'The cropping area you provided exceeds the boundaries of the original image. Please try choosing a correct cropping value.'
);
}
break;
}
default: {
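// Any remaining edit whose name matches a sharp method (e.g. rotate, flip, grayscale) is delegated to sharp directly.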
if (edit in originalImage) {
originalImage[edit](edits[edit]);
}
}
}
}
// Return the modified image
return originalImage;
}
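// Usage sketch, assuming the ImageHandler constructor takes S3 and Rekognition clients
// (an assumption for illustration; check the class constructor in this file):
//   const imageHandler = new ImageHandler(s3Client, rekognitionClient);
//   const image = sharp(originalImageBuffer, { failOnError: false });
//   const edited = await imageHandler.applyEdits(image, { resize: { width: 300 }, grayscale: true });
//   const outputBuffer = await edited.toBuffer();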