def load_resource_spec()

in src/rpdk/core/data_loaders.py

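The function takes an open file object containing the resource provider schema and raises SpecValidationError if the JSON cannot be decoded or the document fails meta-schema validation. A minimal call-site sketch follows; the import paths for load_resource_spec and SpecValidationError are assumed, "my-resource.json" is a hypothetical schema file, and the return value is assumed to be the validated spec:

# Minimal usage sketch (assumed import paths, hypothetical file name).
from rpdk.core.data_loaders import load_resource_spec
from rpdk.core.exceptions import SpecValidationError

with open("my-resource.json", encoding="utf-8") as spec_file:
    try:
        resource_spec = load_resource_spec(spec_file)
    except SpecValidationError as err:
        print(f"Invalid resource spec: {err}")
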

def load_resource_spec(resource_spec_file):  # pylint: disable=R # noqa: C901
    """Load a resource provider definition from a file, and validate it."""
    try:
        resource_spec = json.load(resource_spec_file)
    except ValueError as e:
        LOG.debug("Resource spec decode failed", exc_info=True)
        raise SpecValidationError(str(e)) from e

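    # Validate the decoded document against the resource provider meta-schema.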
    validator = make_resource_validator()
    additional_properties_validator = (
        make_resource_validator_with_additional_properties_check()
    )
    try:
        validator.validate(resource_spec)
    except ValidationError as e:
        LOG.debug("Resource spec validation failed", exc_info=True)
        raise SpecValidationError(str(e)) from e

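    # Best-effort lint pass: flatten the schema and warn about suspicious
    # property definitions (naming, missing insertionOrder, mismatched keywords).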
    try:  # pylint: disable=R
        for _key, schema in JsonSchemaFlattener(resource_spec).flatten_schema().items():
            for property_name, property_details in schema.get("properties", {}).items():
                if property_name[0].islower():
                    LOG.warning(
                        "CloudFormation properties don't usually start with lowercase letters: %s",
                        property_name,
                    )
                try:
                    property_type = property_details["type"]
                    property_keywords = property_details.keys()
                    if (
                        property_type == "array"
                        and "insertionOrder" not in property_keywords
                    ):
                        LOG.warning(
                            "Explicitly specify value for insertionOrder for array: %s",
                            property_name,
                        )
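                    # Keywords that are only meaningful for certain JSON types;
                    # a keyword from another type's set likely indicates a mistake.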
                    keyword_mappings = [
                        (
                            {"integer", "number"},
                            {
                                "minimum",
                                "maximum",
                                "exclusiveMinimum",
                                "exclusiveMaximum",
                                "multipleOf",
                            },
                        ),
                        (
                            {"string"},
                            {
                                "minLength",
                                "maxLength",
                                "pattern",
                            },
                        ),
                        (
                            {"object"},
                            {
                                "minProperties",
                                "maxProperties",
                                "additionalProperties",
                                "patternProperties",
                            },
                        ),
                        (
                            {"array"},
                            {
                                "minItems",
                                "maxItems",
                                "additionalItems",
                                "uniqueItems",
                            },
                        ),
                    ]
                    type_specific_keywords = set().union(
                        *(mapping[1] for mapping in keyword_mappings)
                    )
                    for types, allowed_keywords in keyword_mappings:
                        disallowed_keywords = (
                            type_specific_keywords - allowed_keywords
                        ) & property_keywords
                        if property_type in types and disallowed_keywords:
                            LOG.warning(
                                "Incorrect JSON schema keyword(s) %s for type: %s for property: %s",
                                disallowed_keywords,
                                property_type,
                                property_name,
                            )
                except (KeyError, TypeError):
                    pass
    except FlatteningError:
        pass

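    # Inspect every "pattern" in the spec: warn on hardcoded AWS partitions
    # and on regular expressions that do not compile.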
    for pattern in nested_lookup("pattern", resource_spec):
        if "arn:aws:" in pattern:
            LOG.warning(
                "Don't hardcode the aws partition in ARN patterns: %s",
                pattern,
            )
        try:
            re.compile(pattern)
        except re.error:
            LOG.warning("Could not validate regular expression: %s", pattern)

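    # Warn about very large enums, which tend to go stale quickly.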
    for enum in nested_lookup("enum", resource_spec):
        if len(enum) > 15:
            LOG.warning(
                "Consider not manually maintaining large constantly evolving enums like \