public mapFilter()

in source/image-handler/thumbor-mapper.ts [78:264]


  public mapFilter(filterExpression: string, fileFormat: ImageFormatTypes, previousEdits: ImageEdits = {}): ImageEdits {
    // Parse the expression as ':<filterName>(<filterValue>)'. A non-matching expression yields null,
    // so the destructuring below would throw; callers are expected to pass well-formed filter expressions.
    const matched = filterExpression.match(/:(.+)\((.*)\)/);
    const [, filterName, filterValue] = matched;
    const currentEdits = { ...previousEdits };

    // Find the proper filter
    switch (filterName) {
      case 'autojpg': {
        currentEdits.toFormat = ImageFormatTypes.JPEG;
        break;
      }
      case 'background_color': {
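        // Treat the value as a named CSS color when possible; otherwise interpret it as a hex code.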
        const color = !ColorName[filterValue] ? `#${filterValue}` : filterValue;

        currentEdits.flatten = { background: Color(color).object() };
        break;
      }
      case 'blur': {
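        // Thumbor passes 'radius[,sigma]'; sharp's blur takes a sigma, so use sigma when given, otherwise radius / 2.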
        const [radius, sigma] = filterValue.split(',').map(x => (x === '' ? NaN : Number(x)));
        currentEdits.blur = !isNaN(sigma) ? sigma : radius / 2;
        break;
      }
      case 'convolution': {
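        // filterValue is '<kernel values separated by ;>,<matrix width>'; the loop below derives the kernel height from the width.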
        const values = filterValue.split(',');
        const matrix = values[0].split(';').map(str => Number(str));
        const matrixWidth = Number(values[1]);
        let matrixHeight = 0;
        let counter = 0;

        for (let i = 0; i < matrix.length; i++) {
          if (counter === matrixWidth - 1) {
            matrixHeight++;
            counter = 0;
          } else {
            counter++;
          }
        }

        currentEdits.convolve = {
          width: matrixWidth,
          height: matrixHeight,
          kernel: matrix
        };
        break;
      }
      case 'equalize': {
        currentEdits.normalize = true;
        break;
      }
      case 'fill': {
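        // Contain-fit the image and fill any remaining area with the given background color.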
        if (currentEdits.resize === undefined) {
          currentEdits.resize = {};
        }

        let color = filterValue;
        if (!ColorName[color]) {
          color = `#${color}`;
        }

        currentEdits.resize.fit = ImageFitTypes.CONTAIN;
        currentEdits.resize.background = Color(color).object();
        break;
      }
      case 'format': {
        const imageFormatType = filterValue.replace(/[^0-9a-z]/gi, '').replace(/jpg/i, 'jpeg') as ImageFormatTypes;
        const acceptedValues = [
          ImageFormatTypes.HEIC,
          ImageFormatTypes.HEIF,
          ImageFormatTypes.JPEG,
          ImageFormatTypes.PNG,
          ImageFormatTypes.RAW,
          ImageFormatTypes.TIFF,
          ImageFormatTypes.WEBP
        ];

        if (acceptedValues.includes(imageFormatType)) {
          currentEdits.toFormat = imageFormatType;
        }
        break;
      }
      case 'grayscale': {
        currentEdits.grayscale = true;
        break;
      }
      case 'no_upscale': {
        if (currentEdits.resize === undefined) {
          currentEdits.resize = {};
        }

        currentEdits.resize.withoutEnlargement = true;
        break;
      }
      case 'proportion': {
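        // Scale the width/height set by a preceding resize by the given ratio.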
        if (currentEdits.resize === undefined) {
          currentEdits.resize = {};
        }
        const ratio = Number(filterValue);

        currentEdits.resize.width = Number(currentEdits.resize.width * ratio);
        currentEdits.resize.height = Number(currentEdits.resize.height * ratio);
        break;
      }
      case 'quality': {
        const toSupportedImageFormatType = (format: ImageFormatTypes): ImageFormatTypes =>
          [ImageFormatTypes.JPG, ImageFormatTypes.JPEG].includes(format)
            ? ImageFormatTypes.JPEG
            : [ImageFormatTypes.PNG, ImageFormatTypes.WEBP, ImageFormatTypes.TIFF, ImageFormatTypes.HEIF].includes(format)
            ? format
            : null;

        // Try to determine the target image type based on the `fileFormat` passed to this method.
        // If that is not a supported type, fall back to the format set by a preceding `format` filter.
        const targetImageFileFormat = toSupportedImageFormatType(fileFormat) ?? toSupportedImageFormatType(currentEdits.toFormat);

        if (targetImageFileFormat) {
          currentEdits[targetImageFileFormat] = { quality: Number(filterValue) };
        }
        break;
      }
      case 'rgb': {
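        // Convert the comma-separated percentage values into 0-255 channel values for the tint edit.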
        const percentages = filterValue.split(',');
        const values = percentages.map(percentage => 255 * (Number(percentage) / 100));
        const [r, g, b] = values;

        currentEdits.tint = { r, g, b };
        break;
      }
      case 'rotate': {
        currentEdits.rotate = Number(filterValue);
        break;
      }
      case 'sharpen': {
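        // Derive sharp's sharpen sigma from the second comma-separated value.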
        const values = filterValue.split(',');

        currentEdits.sharpen = 1 + Number(values[1]) / 2;
        break;
      }
      case 'stretch': {
        if (currentEdits.resize === undefined) {
          currentEdits.resize = {};
        }

        // Unless fit-in has already set the fit to 'inside', the stretch filter sets it to 'fill'.
        if (currentEdits.resize.fit !== ImageFitTypes.INSIDE) {
          currentEdits.resize.fit = ImageFitTypes.FILL;
        }
        break;
      }
      case 'strip_exif':
      case 'strip_icc': {
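        // Both strip filters are mapped to a null rotate edit, which signals downstream that metadata is not preserved.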
        currentEdits.rotate = null;
        break;
      }
      case 'upscale': {
        if (currentEdits.resize === undefined) {
          currentEdits.resize = {};
        }

        currentEdits.resize.fit = ImageFitTypes.INSIDE;
        break;
      }
      case 'watermark': {
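        // filterValue is 'bucket,key,x,y,alpha[,wRatio,hRatio]'; whitespace is ignored.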
        const options = filterValue.replace(/\s+/g, '').split(',');
        const [bucket, key, xPos, yPos, alpha, wRatio, hRatio] = options;

        currentEdits.overlayWith = {
          bucket,
          key,
          alpha,
          wRatio,
          hRatio,
          options: {}
        };

        // Positions may be given as percentages ('-100p' to '100p') or as plain numeric offsets.
        const allowedPosPattern = /^(100|[1-9]?[0-9]|-(100|[1-9][0-9]?))p$/;
        if (allowedPosPattern.test(xPos) || !isNaN(Number(xPos))) {
          currentEdits.overlayWith.options.left = xPos;
        }
        if (allowedPosPattern.test(yPos) || !isNaN(Number(yPos))) {
          currentEdits.overlayWith.options.top = yPos;
        }
        break;
      }
    }

    return currentEdits;
  }
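
A minimal usage sketch, assuming the enclosing class is exported as ThumborMapper and that ImageEdits / ImageFormatTypes come from the handler's type definitions (the import paths below are illustrative, not taken from the source):

  import { ThumborMapper } from './thumbor-mapper'; // assumed export name
  import { ImageEdits, ImageFormatTypes } from './lib'; // assumed type location

  const mapper = new ThumborMapper();
  const filters = [':rotate(90)', ':grayscale()', ':quality(80)'];

  // Apply each filter expression in turn, feeding the accumulated edits back in as previousEdits.
  let edits: ImageEdits = {};
  for (const filter of filters) {
    edits = mapper.mapFilter(filter, ImageFormatTypes.PNG, edits);
  }
  // edits now resembles { rotate: 90, grayscale: true, png: { quality: 80 } }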