def validate_unique_mrks()

in src/aws_encryption_sdk/key_providers/kms.py [0:0]


    def validate_unique_mrks(self):
        """Make sure the set of configured key ids does not contain any related MRKs.

        :raises ConfigMismatchError: if any two configured key ids are multi-Region
            keys that share the same resource id (i.e. are replicas of one another);
            the error message lists every offending identifier, not just the first
            duplicate found
        """
        # //= compliance/framework/aws-kms/aws-kms-mrk-aware-master-key-provider.txt#2.6
        # //# All AWS KMS
        # //# key identifiers are be passed to Assert AWS KMS MRK are unique (aws-
        # //# kms-mrk-are-unique.md#Implementation) and the function MUST return
        # //# success.

        # //= compliance/framework/aws-kms/aws-kms-mrk-are-unique.txt#2.5
        # //# The caller MUST provide:

        # //= compliance/framework/aws-kms/aws-kms-mrk-are-unique.txt#2.5
        # //# If the list does not contain any multi-Region keys (aws-kms-key-
        # //# arn.md#identifying-an-aws-kms-multi-region-key) this function MUST
        # //# exit successfully.
        mrk_identifiers = filter(is_valid_mrk_identifier, self.config.key_ids)
        duplicate_ids = set()
        for key1, key2 in itertools.combinations(mrk_identifiers, 2):
            # Skip the resource comparison when both keys are already known
            # duplicates (the original `pass` here was a no-op; `continue` is
            # the intended short-circuit).
            if key1 in duplicate_ids and key2 in duplicate_ids:
                continue
            if _key_resource_match(key1, key2):
                # set.add is idempotent, so no membership checks are needed.
                duplicate_ids.add(key1)
                duplicate_ids.add(key2)

        # //= compliance/framework/aws-kms/aws-kms-mrk-are-unique.txt#2.5
        # //# If there are zero duplicate resource ids between the multi-region
        # //# keys, this function MUST exit successfully

        # //= compliance/framework/aws-kms/aws-kms-mrk-are-unique.txt#2.5
        # //# If any duplicate multi-region resource ids exist, this function MUST
        # //# yield an error that includes all identifiers with duplicate resource
        # //# ids not only the first duplicate found.
        if duplicate_ids:
            # Sort so the error message is deterministic (sets iterate in
            # arbitrary order), which keeps logs and tests reproducible.
            raise ConfigMismatchError(
                "Configured key ids must be unique. Found related MRKs: {keys}".format(
                    keys=", ".join(sorted(duplicate_ids))
                )
            )