S2N_RESULT s2n_key_material_init()

in tls/s2n_prf.c [51:134]


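/* Forward declarations for the extended master secret (EMS) PRF helpers. */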
S2N_RESULT s2n_prf_get_digest_for_ems(struct s2n_connection *conn, struct s2n_blob *message,
        s2n_hash_algorithm hash_alg, struct s2n_blob *output);
S2N_RESULT s2n_prf_tls_extended_master_secret(struct s2n_connection *conn,
        struct s2n_blob *premaster_secret, struct s2n_blob *session_hash, struct s2n_blob *sha1_hash);

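The function below slices a single key_block buffer into per-direction MAC, key, and IV views. For reference, here is a minimal sketch of the struct s2n_key_material it populates, inferred from the field accesses in the function; the real definition in s2n-tls may differ, and the key_block bound shown is an assumption:

/* Hypothetical reconstruction of struct s2n_key_material from the usage below.
 * The blobs are views into key_block, laid out in TLS key expansion order. */
#define S2N_MAX_KEY_BLOCK_LEN 160 /* assumed upper bound, not from this section */

struct s2n_key_material {
    uint8_t key_block[S2N_MAX_KEY_BLOCK_LEN]; /* raw PRF key expansion output */
    struct s2n_blob client_mac;
    struct s2n_blob server_mac;
    struct s2n_blob client_key;
    struct s2n_blob server_key;
    struct s2n_blob client_iv;
    struct s2n_blob server_iv;
};
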
S2N_RESULT s2n_key_material_init(struct s2n_key_material *key_material, struct s2n_connection *conn)
{
    RESULT_ENSURE_REF(key_material);
    RESULT_ENSURE_REF(conn);
    RESULT_ENSURE_REF(conn->secure);
    RESULT_ENSURE_REF(conn->secure->cipher_suite);
    RESULT_ENSURE_REF(conn->secure->cipher_suite->record_alg);
    const struct s2n_cipher *cipher = conn->secure->cipher_suite->record_alg->cipher;
    RESULT_ENSURE_REF(cipher);

    uint8_t mac_size = 0;
    uint32_t key_size = 0;
    uint32_t iv_size = 0;

    /* MAC size: in TLS the MAC key length equals the HMAC digest length;
     * composite ciphers carry their own MAC key size. */
    if (cipher->type == S2N_COMPOSITE) {
        mac_size = cipher->io.comp.mac_key_size;
    } else {
        RESULT_GUARD_POSIX(s2n_hmac_digest_size(conn->secure->cipher_suite->record_alg->hmac_alg, &mac_size));
    }

    /* KEY size */
    key_size = cipher->key_material_size;

    /* For TLS 1.1 and later, only AEAD ciphers take an implicit (fixed) IV from
     * the key block; TLS 1.0 and earlier also derive CBC and composite IVs here. */
    if (conn->actual_protocol_version <= S2N_TLS10 || cipher->type == S2N_AEAD) {
        /* IV size */
        switch (cipher->type) {
            case S2N_AEAD:
                iv_size = cipher->io.aead.fixed_iv_size;
                break;
            case S2N_CBC:
                iv_size = cipher->io.cbc.block_size;
                break;
            case S2N_COMPOSITE:
                iv_size = cipher->io.comp.block_size;
                break;
            /* No-op for stream ciphers */
            default:
                break;
        }
    }
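    /* For stream ciphers, and for CBC/composite ciphers on TLS 1.1 and later,
     * iv_size stays 0, so the IV blobs initialized below are empty. */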

    struct s2n_stuffer key_material_stuffer = { 0 };
    struct s2n_blob key_material_blob = { 0 };
    RESULT_GUARD_POSIX(s2n_blob_init(&key_material_blob, key_material->key_block, sizeof(key_material->key_block)));
    RESULT_GUARD_POSIX(s2n_stuffer_init_written(&key_material_stuffer, &key_material_blob));

    /* Initialize the key_material blobs in RFC 5246 key expansion order
     * (client MAC, server MAC, client key, server key, client IV, server IV);
     * each raw read advances the stuffer so ptr points at the next slice of key_block */
    uint8_t *ptr = NULL;
    /* MAC */
    ptr = s2n_stuffer_raw_read(&key_material_stuffer, mac_size);
    RESULT_ENSURE_REF(ptr);
    RESULT_GUARD_POSIX(s2n_blob_init(&key_material->client_mac, ptr, mac_size));

    ptr = s2n_stuffer_raw_read(&key_material_stuffer, mac_size);
    RESULT_ENSURE_REF(ptr);
    RESULT_GUARD_POSIX(s2n_blob_init(&key_material->server_mac, ptr, mac_size));

    /* KEY */
    ptr = s2n_stuffer_raw_read(&key_material_stuffer, key_size);
    RESULT_ENSURE_REF(ptr);
    RESULT_GUARD_POSIX(s2n_blob_init(&key_material->client_key, ptr, key_size));

    ptr = s2n_stuffer_raw_read(&key_material_stuffer, key_size);
    RESULT_ENSURE_REF(ptr);
    RESULT_GUARD_POSIX(s2n_blob_init(&key_material->server_key, ptr, key_size));

    /* IV */
    ptr = s2n_stuffer_raw_read(&key_material_stuffer, iv_size);
    RESULT_ENSURE_REF(ptr);
    RESULT_GUARD_POSIX(s2n_blob_init(&key_material->client_iv, ptr, iv_size));

    ptr = s2n_stuffer_raw_read(&key_material_stuffer, iv_size);
    RESULT_ENSURE_REF(ptr);
    RESULT_GUARD_POSIX(s2n_blob_init(&key_material->server_iv, ptr, iv_size));

    return S2N_RESULT_OK;
}
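
Finally, a minimal caller sketch, assuming the surrounding PRF key-expansion path; the wrapper name below is hypothetical, and only s2n_key_material_init() comes from this section:

/* Hypothetical wrapper illustrating the call pattern: the blobs set up by
 * s2n_key_material_init() alias key_material.key_block, so whatever the TLS
 * PRF writes into key_block is read back through client_key, server_key,
 * the MAC blobs, and (when sized) the IV blobs. */
static S2N_RESULT s2n_example_expand_key_material(struct s2n_connection *conn)
{
    struct s2n_key_material key_material = { 0 };
    RESULT_GUARD(s2n_key_material_init(&key_material, conn));

    /* For an AEAD suite such as AES-128-GCM one would expect (assumed sizes):
     * empty MAC blobs, 16-byte keys, and 4-byte fixed IVs. */
    RESULT_ENSURE_REF(key_material.client_key.data);

    return S2N_RESULT_OK;
}

Because the blobs are views rather than copies, the derived keys live only in key_block; wiping that one buffer wipes them all.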