in media/libvpx/config/mac/ia32/vpx_dsp_rtcd.h [1071:1607]
/*
 * Run-time CPU detection (RTCD) initializer for vpx_dsp on x86 (ia32).
 *
 * For every dispatchable vpx_dsp function pointer, this routine first
 * installs the portable C implementation as a safe default, then
 * overwrites it with progressively faster SIMD variants for each
 * instruction-set extension the running CPU reports.
 *
 * NOTE(review): this file is auto-generated by libvpx's rtcd.pl from
 * vpx_dsp_rtcd_defs.pl — do not hand-edit the assignment lists; fixes
 * belong in the generator inputs.
 *
 * Ordering invariant: within each function's group, assignments go from
 * baseline C upward (SSE2 -> SSSE3 -> SSE4_1 -> AVX -> AVX2), so the
 * last test that passes wins and the fastest supported variant is the
 * one left installed. Reordering these lines would change which
 * implementation is selected.
 *
 * Not every function has every variant (e.g. vpx_quantize_b_32x32 has
 * no SSE2 version); absent tiers simply fall through to the previous
 * assignment.
 */
static void setup_rtcd_internal(void)
{
/* Query CPUID-derived capability bits (HAS_SSE2, HAS_SSSE3, ...). */
int flags = x86_simd_caps();
/* Silence "unused" warnings in configurations where no specialized
 * variants exist and none of the HAS_* tests below are emitted. */
(void)flags;
/* --- Averaging / prediction helpers --- */
vpx_avg_4x4 = vpx_avg_4x4_c;
if (flags & HAS_SSE2) vpx_avg_4x4 = vpx_avg_4x4_sse2;
vpx_avg_8x8 = vpx_avg_8x8_c;
if (flags & HAS_SSE2) vpx_avg_8x8 = vpx_avg_8x8_sse2;
vpx_comp_avg_pred = vpx_comp_avg_pred_c;
if (flags & HAS_SSE2) vpx_comp_avg_pred = vpx_comp_avg_pred_sse2;
if (flags & HAS_AVX2) vpx_comp_avg_pred = vpx_comp_avg_pred_avx2;
/* --- 8-tap convolution (inter prediction filtering) --- */
vpx_convolve8 = vpx_convolve8_c;
if (flags & HAS_SSE2) vpx_convolve8 = vpx_convolve8_sse2;
if (flags & HAS_SSSE3) vpx_convolve8 = vpx_convolve8_ssse3;
if (flags & HAS_AVX2) vpx_convolve8 = vpx_convolve8_avx2;
vpx_convolve8_avg = vpx_convolve8_avg_c;
if (flags & HAS_SSE2) vpx_convolve8_avg = vpx_convolve8_avg_sse2;
if (flags & HAS_SSSE3) vpx_convolve8_avg = vpx_convolve8_avg_ssse3;
if (flags & HAS_AVX2) vpx_convolve8_avg = vpx_convolve8_avg_avx2;
vpx_convolve8_avg_horiz = vpx_convolve8_avg_horiz_c;
if (flags & HAS_SSE2) vpx_convolve8_avg_horiz = vpx_convolve8_avg_horiz_sse2;
if (flags & HAS_SSSE3) vpx_convolve8_avg_horiz = vpx_convolve8_avg_horiz_ssse3;
if (flags & HAS_AVX2) vpx_convolve8_avg_horiz = vpx_convolve8_avg_horiz_avx2;
vpx_convolve8_avg_vert = vpx_convolve8_avg_vert_c;
if (flags & HAS_SSE2) vpx_convolve8_avg_vert = vpx_convolve8_avg_vert_sse2;
if (flags & HAS_SSSE3) vpx_convolve8_avg_vert = vpx_convolve8_avg_vert_ssse3;
if (flags & HAS_AVX2) vpx_convolve8_avg_vert = vpx_convolve8_avg_vert_avx2;
vpx_convolve8_horiz = vpx_convolve8_horiz_c;
if (flags & HAS_SSE2) vpx_convolve8_horiz = vpx_convolve8_horiz_sse2;
if (flags & HAS_SSSE3) vpx_convolve8_horiz = vpx_convolve8_horiz_ssse3;
if (flags & HAS_AVX2) vpx_convolve8_horiz = vpx_convolve8_horiz_avx2;
vpx_convolve8_vert = vpx_convolve8_vert_c;
if (flags & HAS_SSE2) vpx_convolve8_vert = vpx_convolve8_vert_sse2;
if (flags & HAS_SSSE3) vpx_convolve8_vert = vpx_convolve8_vert_ssse3;
if (flags & HAS_AVX2) vpx_convolve8_vert = vpx_convolve8_vert_avx2;
vpx_convolve_avg = vpx_convolve_avg_c;
if (flags & HAS_SSE2) vpx_convolve_avg = vpx_convolve_avg_sse2;
vpx_convolve_copy = vpx_convolve_copy_c;
if (flags & HAS_SSE2) vpx_convolve_copy = vpx_convolve_copy_sse2;
/* --- Directional / DC / TM / H / V intra predictors --- */
vpx_d153_predictor_16x16 = vpx_d153_predictor_16x16_c;
if (flags & HAS_SSSE3) vpx_d153_predictor_16x16 = vpx_d153_predictor_16x16_ssse3;
vpx_d153_predictor_32x32 = vpx_d153_predictor_32x32_c;
if (flags & HAS_SSSE3) vpx_d153_predictor_32x32 = vpx_d153_predictor_32x32_ssse3;
vpx_d153_predictor_4x4 = vpx_d153_predictor_4x4_c;
if (flags & HAS_SSSE3) vpx_d153_predictor_4x4 = vpx_d153_predictor_4x4_ssse3;
vpx_d153_predictor_8x8 = vpx_d153_predictor_8x8_c;
if (flags & HAS_SSSE3) vpx_d153_predictor_8x8 = vpx_d153_predictor_8x8_ssse3;
vpx_d207_predictor_16x16 = vpx_d207_predictor_16x16_c;
if (flags & HAS_SSSE3) vpx_d207_predictor_16x16 = vpx_d207_predictor_16x16_ssse3;
vpx_d207_predictor_32x32 = vpx_d207_predictor_32x32_c;
if (flags & HAS_SSSE3) vpx_d207_predictor_32x32 = vpx_d207_predictor_32x32_ssse3;
vpx_d207_predictor_4x4 = vpx_d207_predictor_4x4_c;
if (flags & HAS_SSE2) vpx_d207_predictor_4x4 = vpx_d207_predictor_4x4_sse2;
vpx_d207_predictor_8x8 = vpx_d207_predictor_8x8_c;
if (flags & HAS_SSSE3) vpx_d207_predictor_8x8 = vpx_d207_predictor_8x8_ssse3;
vpx_d45_predictor_16x16 = vpx_d45_predictor_16x16_c;
if (flags & HAS_SSSE3) vpx_d45_predictor_16x16 = vpx_d45_predictor_16x16_ssse3;
vpx_d45_predictor_32x32 = vpx_d45_predictor_32x32_c;
if (flags & HAS_SSSE3) vpx_d45_predictor_32x32 = vpx_d45_predictor_32x32_ssse3;
vpx_d45_predictor_4x4 = vpx_d45_predictor_4x4_c;
if (flags & HAS_SSE2) vpx_d45_predictor_4x4 = vpx_d45_predictor_4x4_sse2;
vpx_d45_predictor_8x8 = vpx_d45_predictor_8x8_c;
if (flags & HAS_SSE2) vpx_d45_predictor_8x8 = vpx_d45_predictor_8x8_sse2;
vpx_d63_predictor_16x16 = vpx_d63_predictor_16x16_c;
if (flags & HAS_SSSE3) vpx_d63_predictor_16x16 = vpx_d63_predictor_16x16_ssse3;
vpx_d63_predictor_32x32 = vpx_d63_predictor_32x32_c;
if (flags & HAS_SSSE3) vpx_d63_predictor_32x32 = vpx_d63_predictor_32x32_ssse3;
vpx_d63_predictor_4x4 = vpx_d63_predictor_4x4_c;
if (flags & HAS_SSSE3) vpx_d63_predictor_4x4 = vpx_d63_predictor_4x4_ssse3;
vpx_d63_predictor_8x8 = vpx_d63_predictor_8x8_c;
if (flags & HAS_SSSE3) vpx_d63_predictor_8x8 = vpx_d63_predictor_8x8_ssse3;
vpx_dc_128_predictor_16x16 = vpx_dc_128_predictor_16x16_c;
if (flags & HAS_SSE2) vpx_dc_128_predictor_16x16 = vpx_dc_128_predictor_16x16_sse2;
vpx_dc_128_predictor_32x32 = vpx_dc_128_predictor_32x32_c;
if (flags & HAS_SSE2) vpx_dc_128_predictor_32x32 = vpx_dc_128_predictor_32x32_sse2;
vpx_dc_128_predictor_4x4 = vpx_dc_128_predictor_4x4_c;
if (flags & HAS_SSE2) vpx_dc_128_predictor_4x4 = vpx_dc_128_predictor_4x4_sse2;
vpx_dc_128_predictor_8x8 = vpx_dc_128_predictor_8x8_c;
if (flags & HAS_SSE2) vpx_dc_128_predictor_8x8 = vpx_dc_128_predictor_8x8_sse2;
vpx_dc_left_predictor_16x16 = vpx_dc_left_predictor_16x16_c;
if (flags & HAS_SSE2) vpx_dc_left_predictor_16x16 = vpx_dc_left_predictor_16x16_sse2;
vpx_dc_left_predictor_32x32 = vpx_dc_left_predictor_32x32_c;
if (flags & HAS_SSE2) vpx_dc_left_predictor_32x32 = vpx_dc_left_predictor_32x32_sse2;
vpx_dc_left_predictor_4x4 = vpx_dc_left_predictor_4x4_c;
if (flags & HAS_SSE2) vpx_dc_left_predictor_4x4 = vpx_dc_left_predictor_4x4_sse2;
vpx_dc_left_predictor_8x8 = vpx_dc_left_predictor_8x8_c;
if (flags & HAS_SSE2) vpx_dc_left_predictor_8x8 = vpx_dc_left_predictor_8x8_sse2;
vpx_dc_predictor_16x16 = vpx_dc_predictor_16x16_c;
if (flags & HAS_SSE2) vpx_dc_predictor_16x16 = vpx_dc_predictor_16x16_sse2;
vpx_dc_predictor_32x32 = vpx_dc_predictor_32x32_c;
if (flags & HAS_SSE2) vpx_dc_predictor_32x32 = vpx_dc_predictor_32x32_sse2;
vpx_dc_predictor_4x4 = vpx_dc_predictor_4x4_c;
if (flags & HAS_SSE2) vpx_dc_predictor_4x4 = vpx_dc_predictor_4x4_sse2;
vpx_dc_predictor_8x8 = vpx_dc_predictor_8x8_c;
if (flags & HAS_SSE2) vpx_dc_predictor_8x8 = vpx_dc_predictor_8x8_sse2;
vpx_dc_top_predictor_16x16 = vpx_dc_top_predictor_16x16_c;
if (flags & HAS_SSE2) vpx_dc_top_predictor_16x16 = vpx_dc_top_predictor_16x16_sse2;
vpx_dc_top_predictor_32x32 = vpx_dc_top_predictor_32x32_c;
if (flags & HAS_SSE2) vpx_dc_top_predictor_32x32 = vpx_dc_top_predictor_32x32_sse2;
vpx_dc_top_predictor_4x4 = vpx_dc_top_predictor_4x4_c;
if (flags & HAS_SSE2) vpx_dc_top_predictor_4x4 = vpx_dc_top_predictor_4x4_sse2;
vpx_dc_top_predictor_8x8 = vpx_dc_top_predictor_8x8_c;
if (flags & HAS_SSE2) vpx_dc_top_predictor_8x8 = vpx_dc_top_predictor_8x8_sse2;
/* --- Forward DCTs --- */
vpx_fdct16x16 = vpx_fdct16x16_c;
if (flags & HAS_SSE2) vpx_fdct16x16 = vpx_fdct16x16_sse2;
if (flags & HAS_AVX2) vpx_fdct16x16 = vpx_fdct16x16_avx2;
vpx_fdct16x16_1 = vpx_fdct16x16_1_c;
if (flags & HAS_SSE2) vpx_fdct16x16_1 = vpx_fdct16x16_1_sse2;
vpx_fdct32x32 = vpx_fdct32x32_c;
if (flags & HAS_SSE2) vpx_fdct32x32 = vpx_fdct32x32_sse2;
if (flags & HAS_AVX2) vpx_fdct32x32 = vpx_fdct32x32_avx2;
vpx_fdct32x32_1 = vpx_fdct32x32_1_c;
if (flags & HAS_SSE2) vpx_fdct32x32_1 = vpx_fdct32x32_1_sse2;
vpx_fdct32x32_rd = vpx_fdct32x32_rd_c;
if (flags & HAS_SSE2) vpx_fdct32x32_rd = vpx_fdct32x32_rd_sse2;
if (flags & HAS_AVX2) vpx_fdct32x32_rd = vpx_fdct32x32_rd_avx2;
vpx_fdct4x4 = vpx_fdct4x4_c;
if (flags & HAS_SSE2) vpx_fdct4x4 = vpx_fdct4x4_sse2;
vpx_fdct4x4_1 = vpx_fdct4x4_1_c;
if (flags & HAS_SSE2) vpx_fdct4x4_1 = vpx_fdct4x4_1_sse2;
vpx_fdct8x8 = vpx_fdct8x8_c;
if (flags & HAS_SSE2) vpx_fdct8x8 = vpx_fdct8x8_sse2;
vpx_fdct8x8_1 = vpx_fdct8x8_1_c;
if (flags & HAS_SSE2) vpx_fdct8x8_1 = vpx_fdct8x8_1_sse2;
/* --- Variance helpers and Hadamard transforms --- */
vpx_get16x16var = vpx_get16x16var_c;
if (flags & HAS_SSE2) vpx_get16x16var = vpx_get16x16var_sse2;
if (flags & HAS_AVX2) vpx_get16x16var = vpx_get16x16var_avx2;
vpx_get8x8var = vpx_get8x8var_c;
if (flags & HAS_SSE2) vpx_get8x8var = vpx_get8x8var_sse2;
vpx_get_mb_ss = vpx_get_mb_ss_c;
if (flags & HAS_SSE2) vpx_get_mb_ss = vpx_get_mb_ss_sse2;
vpx_h_predictor_16x16 = vpx_h_predictor_16x16_c;
if (flags & HAS_SSE2) vpx_h_predictor_16x16 = vpx_h_predictor_16x16_sse2;
vpx_h_predictor_32x32 = vpx_h_predictor_32x32_c;
if (flags & HAS_SSE2) vpx_h_predictor_32x32 = vpx_h_predictor_32x32_sse2;
vpx_h_predictor_4x4 = vpx_h_predictor_4x4_c;
if (flags & HAS_SSE2) vpx_h_predictor_4x4 = vpx_h_predictor_4x4_sse2;
vpx_h_predictor_8x8 = vpx_h_predictor_8x8_c;
if (flags & HAS_SSE2) vpx_h_predictor_8x8 = vpx_h_predictor_8x8_sse2;
vpx_hadamard_16x16 = vpx_hadamard_16x16_c;
if (flags & HAS_SSE2) vpx_hadamard_16x16 = vpx_hadamard_16x16_sse2;
if (flags & HAS_AVX2) vpx_hadamard_16x16 = vpx_hadamard_16x16_avx2;
vpx_hadamard_32x32 = vpx_hadamard_32x32_c;
if (flags & HAS_SSE2) vpx_hadamard_32x32 = vpx_hadamard_32x32_sse2;
if (flags & HAS_AVX2) vpx_hadamard_32x32 = vpx_hadamard_32x32_avx2;
vpx_hadamard_8x8 = vpx_hadamard_8x8_c;
if (flags & HAS_SSE2) vpx_hadamard_8x8 = vpx_hadamard_8x8_sse2;
/* --- Inverse DCTs / WHT --- */
vpx_idct16x16_10_add = vpx_idct16x16_10_add_c;
if (flags & HAS_SSE2) vpx_idct16x16_10_add = vpx_idct16x16_10_add_sse2;
vpx_idct16x16_1_add = vpx_idct16x16_1_add_c;
if (flags & HAS_SSE2) vpx_idct16x16_1_add = vpx_idct16x16_1_add_sse2;
vpx_idct16x16_256_add = vpx_idct16x16_256_add_c;
if (flags & HAS_SSE2) vpx_idct16x16_256_add = vpx_idct16x16_256_add_sse2;
if (flags & HAS_AVX2) vpx_idct16x16_256_add = vpx_idct16x16_256_add_avx2;
vpx_idct16x16_38_add = vpx_idct16x16_38_add_c;
if (flags & HAS_SSE2) vpx_idct16x16_38_add = vpx_idct16x16_38_add_sse2;
vpx_idct32x32_1024_add = vpx_idct32x32_1024_add_c;
if (flags & HAS_SSE2) vpx_idct32x32_1024_add = vpx_idct32x32_1024_add_sse2;
if (flags & HAS_AVX2) vpx_idct32x32_1024_add = vpx_idct32x32_1024_add_avx2;
vpx_idct32x32_135_add = vpx_idct32x32_135_add_c;
if (flags & HAS_SSE2) vpx_idct32x32_135_add = vpx_idct32x32_135_add_sse2;
if (flags & HAS_SSSE3) vpx_idct32x32_135_add = vpx_idct32x32_135_add_ssse3;
if (flags & HAS_AVX2) vpx_idct32x32_135_add = vpx_idct32x32_135_add_avx2;
vpx_idct32x32_1_add = vpx_idct32x32_1_add_c;
if (flags & HAS_SSE2) vpx_idct32x32_1_add = vpx_idct32x32_1_add_sse2;
vpx_idct32x32_34_add = vpx_idct32x32_34_add_c;
if (flags & HAS_SSE2) vpx_idct32x32_34_add = vpx_idct32x32_34_add_sse2;
if (flags & HAS_SSSE3) vpx_idct32x32_34_add = vpx_idct32x32_34_add_ssse3;
vpx_idct4x4_16_add = vpx_idct4x4_16_add_c;
if (flags & HAS_SSE2) vpx_idct4x4_16_add = vpx_idct4x4_16_add_sse2;
vpx_idct4x4_1_add = vpx_idct4x4_1_add_c;
if (flags & HAS_SSE2) vpx_idct4x4_1_add = vpx_idct4x4_1_add_sse2;
vpx_idct8x8_12_add = vpx_idct8x8_12_add_c;
if (flags & HAS_SSE2) vpx_idct8x8_12_add = vpx_idct8x8_12_add_sse2;
if (flags & HAS_SSSE3) vpx_idct8x8_12_add = vpx_idct8x8_12_add_ssse3;
vpx_idct8x8_1_add = vpx_idct8x8_1_add_c;
if (flags & HAS_SSE2) vpx_idct8x8_1_add = vpx_idct8x8_1_add_sse2;
vpx_idct8x8_64_add = vpx_idct8x8_64_add_c;
if (flags & HAS_SSE2) vpx_idct8x8_64_add = vpx_idct8x8_64_add_sse2;
vpx_int_pro_col = vpx_int_pro_col_c;
if (flags & HAS_SSE2) vpx_int_pro_col = vpx_int_pro_col_sse2;
vpx_int_pro_row = vpx_int_pro_row_c;
if (flags & HAS_SSE2) vpx_int_pro_row = vpx_int_pro_row_sse2;
vpx_iwht4x4_16_add = vpx_iwht4x4_16_add_c;
if (flags & HAS_SSE2) vpx_iwht4x4_16_add = vpx_iwht4x4_16_add_sse2;
/* --- Loop filters --- */
vpx_lpf_horizontal_16 = vpx_lpf_horizontal_16_c;
if (flags & HAS_SSE2) vpx_lpf_horizontal_16 = vpx_lpf_horizontal_16_sse2;
if (flags & HAS_AVX2) vpx_lpf_horizontal_16 = vpx_lpf_horizontal_16_avx2;
vpx_lpf_horizontal_16_dual = vpx_lpf_horizontal_16_dual_c;
if (flags & HAS_SSE2) vpx_lpf_horizontal_16_dual = vpx_lpf_horizontal_16_dual_sse2;
if (flags & HAS_AVX2) vpx_lpf_horizontal_16_dual = vpx_lpf_horizontal_16_dual_avx2;
vpx_lpf_horizontal_4 = vpx_lpf_horizontal_4_c;
if (flags & HAS_SSE2) vpx_lpf_horizontal_4 = vpx_lpf_horizontal_4_sse2;
vpx_lpf_horizontal_4_dual = vpx_lpf_horizontal_4_dual_c;
if (flags & HAS_SSE2) vpx_lpf_horizontal_4_dual = vpx_lpf_horizontal_4_dual_sse2;
vpx_lpf_horizontal_8 = vpx_lpf_horizontal_8_c;
if (flags & HAS_SSE2) vpx_lpf_horizontal_8 = vpx_lpf_horizontal_8_sse2;
vpx_lpf_horizontal_8_dual = vpx_lpf_horizontal_8_dual_c;
if (flags & HAS_SSE2) vpx_lpf_horizontal_8_dual = vpx_lpf_horizontal_8_dual_sse2;
vpx_lpf_vertical_16 = vpx_lpf_vertical_16_c;
if (flags & HAS_SSE2) vpx_lpf_vertical_16 = vpx_lpf_vertical_16_sse2;
vpx_lpf_vertical_16_dual = vpx_lpf_vertical_16_dual_c;
if (flags & HAS_SSE2) vpx_lpf_vertical_16_dual = vpx_lpf_vertical_16_dual_sse2;
vpx_lpf_vertical_4 = vpx_lpf_vertical_4_c;
if (flags & HAS_SSE2) vpx_lpf_vertical_4 = vpx_lpf_vertical_4_sse2;
vpx_lpf_vertical_4_dual = vpx_lpf_vertical_4_dual_c;
if (flags & HAS_SSE2) vpx_lpf_vertical_4_dual = vpx_lpf_vertical_4_dual_sse2;
vpx_lpf_vertical_8 = vpx_lpf_vertical_8_c;
if (flags & HAS_SSE2) vpx_lpf_vertical_8 = vpx_lpf_vertical_8_sse2;
vpx_lpf_vertical_8_dual = vpx_lpf_vertical_8_dual_c;
if (flags & HAS_SSE2) vpx_lpf_vertical_8_dual = vpx_lpf_vertical_8_dual_sse2;
/* --- Post-processing --- */
vpx_mbpost_proc_across_ip = vpx_mbpost_proc_across_ip_c;
if (flags & HAS_SSE2) vpx_mbpost_proc_across_ip = vpx_mbpost_proc_across_ip_sse2;
vpx_mbpost_proc_down = vpx_mbpost_proc_down_c;
if (flags & HAS_SSE2) vpx_mbpost_proc_down = vpx_mbpost_proc_down_sse2;
vpx_minmax_8x8 = vpx_minmax_8x8_c;
if (flags & HAS_SSE2) vpx_minmax_8x8 = vpx_minmax_8x8_sse2;
/* --- Mean squared error --- */
vpx_mse16x16 = vpx_mse16x16_c;
if (flags & HAS_SSE2) vpx_mse16x16 = vpx_mse16x16_sse2;
if (flags & HAS_AVX2) vpx_mse16x16 = vpx_mse16x16_avx2;
vpx_mse16x8 = vpx_mse16x8_c;
if (flags & HAS_SSE2) vpx_mse16x8 = vpx_mse16x8_sse2;
if (flags & HAS_AVX2) vpx_mse16x8 = vpx_mse16x8_avx2;
vpx_mse8x16 = vpx_mse8x16_c;
if (flags & HAS_SSE2) vpx_mse8x16 = vpx_mse8x16_sse2;
vpx_mse8x8 = vpx_mse8x8_c;
if (flags & HAS_SSE2) vpx_mse8x8 = vpx_mse8x8_sse2;
vpx_plane_add_noise = vpx_plane_add_noise_c;
if (flags & HAS_SSE2) vpx_plane_add_noise = vpx_plane_add_noise_sse2;
vpx_post_proc_down_and_across_mb_row = vpx_post_proc_down_and_across_mb_row_c;
if (flags & HAS_SSE2) vpx_post_proc_down_and_across_mb_row = vpx_post_proc_down_and_across_mb_row_sse2;
/* --- Quantization (note: no SSE2 tier for the 32x32 variant) --- */
vpx_quantize_b = vpx_quantize_b_c;
if (flags & HAS_SSE2) vpx_quantize_b = vpx_quantize_b_sse2;
if (flags & HAS_SSSE3) vpx_quantize_b = vpx_quantize_b_ssse3;
if (flags & HAS_AVX) vpx_quantize_b = vpx_quantize_b_avx;
if (flags & HAS_AVX2) vpx_quantize_b = vpx_quantize_b_avx2;
vpx_quantize_b_32x32 = vpx_quantize_b_32x32_c;
if (flags & HAS_SSSE3) vpx_quantize_b_32x32 = vpx_quantize_b_32x32_ssse3;
if (flags & HAS_AVX) vpx_quantize_b_32x32 = vpx_quantize_b_32x32_avx;
if (flags & HAS_AVX2) vpx_quantize_b_32x32 = vpx_quantize_b_32x32_avx2;
/* --- Sum of absolute differences (motion search) --- */
vpx_sad16x16 = vpx_sad16x16_c;
if (flags & HAS_SSE2) vpx_sad16x16 = vpx_sad16x16_sse2;
vpx_sad16x16_avg = vpx_sad16x16_avg_c;
if (flags & HAS_SSE2) vpx_sad16x16_avg = vpx_sad16x16_avg_sse2;
vpx_sad16x16x4d = vpx_sad16x16x4d_c;
if (flags & HAS_SSE2) vpx_sad16x16x4d = vpx_sad16x16x4d_sse2;
vpx_sad16x32 = vpx_sad16x32_c;
if (flags & HAS_SSE2) vpx_sad16x32 = vpx_sad16x32_sse2;
vpx_sad16x32_avg = vpx_sad16x32_avg_c;
if (flags & HAS_SSE2) vpx_sad16x32_avg = vpx_sad16x32_avg_sse2;
vpx_sad16x32x4d = vpx_sad16x32x4d_c;
if (flags & HAS_SSE2) vpx_sad16x32x4d = vpx_sad16x32x4d_sse2;
vpx_sad16x8 = vpx_sad16x8_c;
if (flags & HAS_SSE2) vpx_sad16x8 = vpx_sad16x8_sse2;
vpx_sad16x8_avg = vpx_sad16x8_avg_c;
if (flags & HAS_SSE2) vpx_sad16x8_avg = vpx_sad16x8_avg_sse2;
vpx_sad16x8x4d = vpx_sad16x8x4d_c;
if (flags & HAS_SSE2) vpx_sad16x8x4d = vpx_sad16x8x4d_sse2;
vpx_sad32x16 = vpx_sad32x16_c;
if (flags & HAS_SSE2) vpx_sad32x16 = vpx_sad32x16_sse2;
if (flags & HAS_AVX2) vpx_sad32x16 = vpx_sad32x16_avx2;
vpx_sad32x16_avg = vpx_sad32x16_avg_c;
if (flags & HAS_SSE2) vpx_sad32x16_avg = vpx_sad32x16_avg_sse2;
if (flags & HAS_AVX2) vpx_sad32x16_avg = vpx_sad32x16_avg_avx2;
vpx_sad32x16x4d = vpx_sad32x16x4d_c;
if (flags & HAS_SSE2) vpx_sad32x16x4d = vpx_sad32x16x4d_sse2;
vpx_sad32x32 = vpx_sad32x32_c;
if (flags & HAS_SSE2) vpx_sad32x32 = vpx_sad32x32_sse2;
if (flags & HAS_AVX2) vpx_sad32x32 = vpx_sad32x32_avx2;
vpx_sad32x32_avg = vpx_sad32x32_avg_c;
if (flags & HAS_SSE2) vpx_sad32x32_avg = vpx_sad32x32_avg_sse2;
if (flags & HAS_AVX2) vpx_sad32x32_avg = vpx_sad32x32_avg_avx2;
vpx_sad32x32x4d = vpx_sad32x32x4d_c;
if (flags & HAS_SSE2) vpx_sad32x32x4d = vpx_sad32x32x4d_sse2;
if (flags & HAS_AVX2) vpx_sad32x32x4d = vpx_sad32x32x4d_avx2;
vpx_sad32x64 = vpx_sad32x64_c;
if (flags & HAS_SSE2) vpx_sad32x64 = vpx_sad32x64_sse2;
if (flags & HAS_AVX2) vpx_sad32x64 = vpx_sad32x64_avx2;
vpx_sad32x64_avg = vpx_sad32x64_avg_c;
if (flags & HAS_SSE2) vpx_sad32x64_avg = vpx_sad32x64_avg_sse2;
if (flags & HAS_AVX2) vpx_sad32x64_avg = vpx_sad32x64_avg_avx2;
vpx_sad32x64x4d = vpx_sad32x64x4d_c;
if (flags & HAS_SSE2) vpx_sad32x64x4d = vpx_sad32x64x4d_sse2;
vpx_sad4x4 = vpx_sad4x4_c;
if (flags & HAS_SSE2) vpx_sad4x4 = vpx_sad4x4_sse2;
vpx_sad4x4_avg = vpx_sad4x4_avg_c;
if (flags & HAS_SSE2) vpx_sad4x4_avg = vpx_sad4x4_avg_sse2;
vpx_sad4x4x4d = vpx_sad4x4x4d_c;
if (flags & HAS_SSE2) vpx_sad4x4x4d = vpx_sad4x4x4d_sse2;
vpx_sad4x8 = vpx_sad4x8_c;
if (flags & HAS_SSE2) vpx_sad4x8 = vpx_sad4x8_sse2;
vpx_sad4x8_avg = vpx_sad4x8_avg_c;
if (flags & HAS_SSE2) vpx_sad4x8_avg = vpx_sad4x8_avg_sse2;
vpx_sad4x8x4d = vpx_sad4x8x4d_c;
if (flags & HAS_SSE2) vpx_sad4x8x4d = vpx_sad4x8x4d_sse2;
vpx_sad64x32 = vpx_sad64x32_c;
if (flags & HAS_SSE2) vpx_sad64x32 = vpx_sad64x32_sse2;
if (flags & HAS_AVX2) vpx_sad64x32 = vpx_sad64x32_avx2;
vpx_sad64x32_avg = vpx_sad64x32_avg_c;
if (flags & HAS_SSE2) vpx_sad64x32_avg = vpx_sad64x32_avg_sse2;
if (flags & HAS_AVX2) vpx_sad64x32_avg = vpx_sad64x32_avg_avx2;
vpx_sad64x32x4d = vpx_sad64x32x4d_c;
if (flags & HAS_SSE2) vpx_sad64x32x4d = vpx_sad64x32x4d_sse2;
vpx_sad64x64 = vpx_sad64x64_c;
if (flags & HAS_SSE2) vpx_sad64x64 = vpx_sad64x64_sse2;
if (flags & HAS_AVX2) vpx_sad64x64 = vpx_sad64x64_avx2;
vpx_sad64x64_avg = vpx_sad64x64_avg_c;
if (flags & HAS_SSE2) vpx_sad64x64_avg = vpx_sad64x64_avg_sse2;
if (flags & HAS_AVX2) vpx_sad64x64_avg = vpx_sad64x64_avg_avx2;
vpx_sad64x64x4d = vpx_sad64x64x4d_c;
if (flags & HAS_SSE2) vpx_sad64x64x4d = vpx_sad64x64x4d_sse2;
if (flags & HAS_AVX2) vpx_sad64x64x4d = vpx_sad64x64x4d_avx2;
vpx_sad8x16 = vpx_sad8x16_c;
if (flags & HAS_SSE2) vpx_sad8x16 = vpx_sad8x16_sse2;
vpx_sad8x16_avg = vpx_sad8x16_avg_c;
if (flags & HAS_SSE2) vpx_sad8x16_avg = vpx_sad8x16_avg_sse2;
vpx_sad8x16x4d = vpx_sad8x16x4d_c;
if (flags & HAS_SSE2) vpx_sad8x16x4d = vpx_sad8x16x4d_sse2;
vpx_sad8x4 = vpx_sad8x4_c;
if (flags & HAS_SSE2) vpx_sad8x4 = vpx_sad8x4_sse2;
vpx_sad8x4_avg = vpx_sad8x4_avg_c;
if (flags & HAS_SSE2) vpx_sad8x4_avg = vpx_sad8x4_avg_sse2;
vpx_sad8x4x4d = vpx_sad8x4x4d_c;
if (flags & HAS_SSE2) vpx_sad8x4x4d = vpx_sad8x4x4d_sse2;
vpx_sad8x8 = vpx_sad8x8_c;
if (flags & HAS_SSE2) vpx_sad8x8 = vpx_sad8x8_sse2;
vpx_sad8x8_avg = vpx_sad8x8_avg_c;
if (flags & HAS_SSE2) vpx_sad8x8_avg = vpx_sad8x8_avg_sse2;
vpx_sad8x8x4d = vpx_sad8x8x4d_c;
if (flags & HAS_SSE2) vpx_sad8x8x4d = vpx_sad8x8x4d_sse2;
/* --- Skip-row SAD variants --- */
vpx_sad_skip_16x16 = vpx_sad_skip_16x16_c;
if (flags & HAS_SSE2) vpx_sad_skip_16x16 = vpx_sad_skip_16x16_sse2;
vpx_sad_skip_16x16x4d = vpx_sad_skip_16x16x4d_c;
if (flags & HAS_SSE2) vpx_sad_skip_16x16x4d = vpx_sad_skip_16x16x4d_sse2;
vpx_sad_skip_16x32 = vpx_sad_skip_16x32_c;
if (flags & HAS_SSE2) vpx_sad_skip_16x32 = vpx_sad_skip_16x32_sse2;
vpx_sad_skip_16x32x4d = vpx_sad_skip_16x32x4d_c;
if (flags & HAS_SSE2) vpx_sad_skip_16x32x4d = vpx_sad_skip_16x32x4d_sse2;
vpx_sad_skip_16x8 = vpx_sad_skip_16x8_c;
if (flags & HAS_SSE2) vpx_sad_skip_16x8 = vpx_sad_skip_16x8_sse2;
vpx_sad_skip_16x8x4d = vpx_sad_skip_16x8x4d_c;
if (flags & HAS_SSE2) vpx_sad_skip_16x8x4d = vpx_sad_skip_16x8x4d_sse2;
vpx_sad_skip_32x16 = vpx_sad_skip_32x16_c;
if (flags & HAS_SSE2) vpx_sad_skip_32x16 = vpx_sad_skip_32x16_sse2;
if (flags & HAS_AVX2) vpx_sad_skip_32x16 = vpx_sad_skip_32x16_avx2;
vpx_sad_skip_32x16x4d = vpx_sad_skip_32x16x4d_c;
if (flags & HAS_SSE2) vpx_sad_skip_32x16x4d = vpx_sad_skip_32x16x4d_sse2;
if (flags & HAS_AVX2) vpx_sad_skip_32x16x4d = vpx_sad_skip_32x16x4d_avx2;
vpx_sad_skip_32x32 = vpx_sad_skip_32x32_c;
if (flags & HAS_SSE2) vpx_sad_skip_32x32 = vpx_sad_skip_32x32_sse2;
if (flags & HAS_AVX2) vpx_sad_skip_32x32 = vpx_sad_skip_32x32_avx2;
vpx_sad_skip_32x32x4d = vpx_sad_skip_32x32x4d_c;
if (flags & HAS_SSE2) vpx_sad_skip_32x32x4d = vpx_sad_skip_32x32x4d_sse2;
if (flags & HAS_AVX2) vpx_sad_skip_32x32x4d = vpx_sad_skip_32x32x4d_avx2;
vpx_sad_skip_32x64 = vpx_sad_skip_32x64_c;
if (flags & HAS_SSE2) vpx_sad_skip_32x64 = vpx_sad_skip_32x64_sse2;
if (flags & HAS_AVX2) vpx_sad_skip_32x64 = vpx_sad_skip_32x64_avx2;
vpx_sad_skip_32x64x4d = vpx_sad_skip_32x64x4d_c;
if (flags & HAS_SSE2) vpx_sad_skip_32x64x4d = vpx_sad_skip_32x64x4d_sse2;
if (flags & HAS_AVX2) vpx_sad_skip_32x64x4d = vpx_sad_skip_32x64x4d_avx2;
vpx_sad_skip_4x8 = vpx_sad_skip_4x8_c;
if (flags & HAS_SSE2) vpx_sad_skip_4x8 = vpx_sad_skip_4x8_sse2;
vpx_sad_skip_4x8x4d = vpx_sad_skip_4x8x4d_c;
if (flags & HAS_SSE2) vpx_sad_skip_4x8x4d = vpx_sad_skip_4x8x4d_sse2;
vpx_sad_skip_64x32 = vpx_sad_skip_64x32_c;
if (flags & HAS_SSE2) vpx_sad_skip_64x32 = vpx_sad_skip_64x32_sse2;
if (flags & HAS_AVX2) vpx_sad_skip_64x32 = vpx_sad_skip_64x32_avx2;
vpx_sad_skip_64x32x4d = vpx_sad_skip_64x32x4d_c;
if (flags & HAS_SSE2) vpx_sad_skip_64x32x4d = vpx_sad_skip_64x32x4d_sse2;
if (flags & HAS_AVX2) vpx_sad_skip_64x32x4d = vpx_sad_skip_64x32x4d_avx2;
vpx_sad_skip_64x64 = vpx_sad_skip_64x64_c;
if (flags & HAS_SSE2) vpx_sad_skip_64x64 = vpx_sad_skip_64x64_sse2;
if (flags & HAS_AVX2) vpx_sad_skip_64x64 = vpx_sad_skip_64x64_avx2;
vpx_sad_skip_64x64x4d = vpx_sad_skip_64x64x4d_c;
if (flags & HAS_SSE2) vpx_sad_skip_64x64x4d = vpx_sad_skip_64x64x4d_sse2;
if (flags & HAS_AVX2) vpx_sad_skip_64x64x4d = vpx_sad_skip_64x64x4d_avx2;
vpx_sad_skip_8x16 = vpx_sad_skip_8x16_c;
if (flags & HAS_SSE2) vpx_sad_skip_8x16 = vpx_sad_skip_8x16_sse2;
vpx_sad_skip_8x16x4d = vpx_sad_skip_8x16x4d_c;
if (flags & HAS_SSE2) vpx_sad_skip_8x16x4d = vpx_sad_skip_8x16x4d_sse2;
vpx_sad_skip_8x8 = vpx_sad_skip_8x8_c;
if (flags & HAS_SSE2) vpx_sad_skip_8x8 = vpx_sad_skip_8x8_sse2;
vpx_sad_skip_8x8x4d = vpx_sad_skip_8x8x4d_c;
if (flags & HAS_SSE2) vpx_sad_skip_8x8x4d = vpx_sad_skip_8x8x4d_sse2;
vpx_satd = vpx_satd_c;
if (flags & HAS_SSE2) vpx_satd = vpx_satd_sse2;
if (flags & HAS_AVX2) vpx_satd = vpx_satd_avx2;
vpx_scaled_2d = vpx_scaled_2d_c;
if (flags & HAS_SSSE3) vpx_scaled_2d = vpx_scaled_2d_ssse3;
vpx_sse = vpx_sse_c;
if (flags & HAS_SSE4_1) vpx_sse = vpx_sse_sse4_1;
if (flags & HAS_AVX2) vpx_sse = vpx_sse_avx2;
/* --- Sub-pixel (interpolated) variance, with and without averaging --- */
vpx_sub_pixel_avg_variance16x16 = vpx_sub_pixel_avg_variance16x16_c;
if (flags & HAS_SSE2) vpx_sub_pixel_avg_variance16x16 = vpx_sub_pixel_avg_variance16x16_sse2;
if (flags & HAS_SSSE3) vpx_sub_pixel_avg_variance16x16 = vpx_sub_pixel_avg_variance16x16_ssse3;
vpx_sub_pixel_avg_variance16x32 = vpx_sub_pixel_avg_variance16x32_c;
if (flags & HAS_SSE2) vpx_sub_pixel_avg_variance16x32 = vpx_sub_pixel_avg_variance16x32_sse2;
if (flags & HAS_SSSE3) vpx_sub_pixel_avg_variance16x32 = vpx_sub_pixel_avg_variance16x32_ssse3;
vpx_sub_pixel_avg_variance16x8 = vpx_sub_pixel_avg_variance16x8_c;
if (flags & HAS_SSE2) vpx_sub_pixel_avg_variance16x8 = vpx_sub_pixel_avg_variance16x8_sse2;
if (flags & HAS_SSSE3) vpx_sub_pixel_avg_variance16x8 = vpx_sub_pixel_avg_variance16x8_ssse3;
vpx_sub_pixel_avg_variance32x16 = vpx_sub_pixel_avg_variance32x16_c;
if (flags & HAS_SSE2) vpx_sub_pixel_avg_variance32x16 = vpx_sub_pixel_avg_variance32x16_sse2;
if (flags & HAS_SSSE3) vpx_sub_pixel_avg_variance32x16 = vpx_sub_pixel_avg_variance32x16_ssse3;
vpx_sub_pixel_avg_variance32x32 = vpx_sub_pixel_avg_variance32x32_c;
if (flags & HAS_SSE2) vpx_sub_pixel_avg_variance32x32 = vpx_sub_pixel_avg_variance32x32_sse2;
if (flags & HAS_SSSE3) vpx_sub_pixel_avg_variance32x32 = vpx_sub_pixel_avg_variance32x32_ssse3;
if (flags & HAS_AVX2) vpx_sub_pixel_avg_variance32x32 = vpx_sub_pixel_avg_variance32x32_avx2;
vpx_sub_pixel_avg_variance32x64 = vpx_sub_pixel_avg_variance32x64_c;
if (flags & HAS_SSE2) vpx_sub_pixel_avg_variance32x64 = vpx_sub_pixel_avg_variance32x64_sse2;
if (flags & HAS_SSSE3) vpx_sub_pixel_avg_variance32x64 = vpx_sub_pixel_avg_variance32x64_ssse3;
vpx_sub_pixel_avg_variance4x4 = vpx_sub_pixel_avg_variance4x4_c;
if (flags & HAS_SSE2) vpx_sub_pixel_avg_variance4x4 = vpx_sub_pixel_avg_variance4x4_sse2;
if (flags & HAS_SSSE3) vpx_sub_pixel_avg_variance4x4 = vpx_sub_pixel_avg_variance4x4_ssse3;
vpx_sub_pixel_avg_variance4x8 = vpx_sub_pixel_avg_variance4x8_c;
if (flags & HAS_SSE2) vpx_sub_pixel_avg_variance4x8 = vpx_sub_pixel_avg_variance4x8_sse2;
if (flags & HAS_SSSE3) vpx_sub_pixel_avg_variance4x8 = vpx_sub_pixel_avg_variance4x8_ssse3;
vpx_sub_pixel_avg_variance64x32 = vpx_sub_pixel_avg_variance64x32_c;
if (flags & HAS_SSE2) vpx_sub_pixel_avg_variance64x32 = vpx_sub_pixel_avg_variance64x32_sse2;
if (flags & HAS_SSSE3) vpx_sub_pixel_avg_variance64x32 = vpx_sub_pixel_avg_variance64x32_ssse3;
vpx_sub_pixel_avg_variance64x64 = vpx_sub_pixel_avg_variance64x64_c;
if (flags & HAS_SSE2) vpx_sub_pixel_avg_variance64x64 = vpx_sub_pixel_avg_variance64x64_sse2;
if (flags & HAS_SSSE3) vpx_sub_pixel_avg_variance64x64 = vpx_sub_pixel_avg_variance64x64_ssse3;
if (flags & HAS_AVX2) vpx_sub_pixel_avg_variance64x64 = vpx_sub_pixel_avg_variance64x64_avx2;
vpx_sub_pixel_avg_variance8x16 = vpx_sub_pixel_avg_variance8x16_c;
if (flags & HAS_SSE2) vpx_sub_pixel_avg_variance8x16 = vpx_sub_pixel_avg_variance8x16_sse2;
if (flags & HAS_SSSE3) vpx_sub_pixel_avg_variance8x16 = vpx_sub_pixel_avg_variance8x16_ssse3;
vpx_sub_pixel_avg_variance8x4 = vpx_sub_pixel_avg_variance8x4_c;
if (flags & HAS_SSE2) vpx_sub_pixel_avg_variance8x4 = vpx_sub_pixel_avg_variance8x4_sse2;
if (flags & HAS_SSSE3) vpx_sub_pixel_avg_variance8x4 = vpx_sub_pixel_avg_variance8x4_ssse3;
vpx_sub_pixel_avg_variance8x8 = vpx_sub_pixel_avg_variance8x8_c;
if (flags & HAS_SSE2) vpx_sub_pixel_avg_variance8x8 = vpx_sub_pixel_avg_variance8x8_sse2;
if (flags & HAS_SSSE3) vpx_sub_pixel_avg_variance8x8 = vpx_sub_pixel_avg_variance8x8_ssse3;
vpx_sub_pixel_variance16x16 = vpx_sub_pixel_variance16x16_c;
if (flags & HAS_SSE2) vpx_sub_pixel_variance16x16 = vpx_sub_pixel_variance16x16_sse2;
if (flags & HAS_SSSE3) vpx_sub_pixel_variance16x16 = vpx_sub_pixel_variance16x16_ssse3;
vpx_sub_pixel_variance16x32 = vpx_sub_pixel_variance16x32_c;
if (flags & HAS_SSE2) vpx_sub_pixel_variance16x32 = vpx_sub_pixel_variance16x32_sse2;
if (flags & HAS_SSSE3) vpx_sub_pixel_variance16x32 = vpx_sub_pixel_variance16x32_ssse3;
vpx_sub_pixel_variance16x8 = vpx_sub_pixel_variance16x8_c;
if (flags & HAS_SSE2) vpx_sub_pixel_variance16x8 = vpx_sub_pixel_variance16x8_sse2;
if (flags & HAS_SSSE3) vpx_sub_pixel_variance16x8 = vpx_sub_pixel_variance16x8_ssse3;
vpx_sub_pixel_variance32x16 = vpx_sub_pixel_variance32x16_c;
if (flags & HAS_SSE2) vpx_sub_pixel_variance32x16 = vpx_sub_pixel_variance32x16_sse2;
if (flags & HAS_SSSE3) vpx_sub_pixel_variance32x16 = vpx_sub_pixel_variance32x16_ssse3;
vpx_sub_pixel_variance32x32 = vpx_sub_pixel_variance32x32_c;
if (flags & HAS_SSE2) vpx_sub_pixel_variance32x32 = vpx_sub_pixel_variance32x32_sse2;
if (flags & HAS_SSSE3) vpx_sub_pixel_variance32x32 = vpx_sub_pixel_variance32x32_ssse3;
if (flags & HAS_AVX2) vpx_sub_pixel_variance32x32 = vpx_sub_pixel_variance32x32_avx2;
vpx_sub_pixel_variance32x64 = vpx_sub_pixel_variance32x64_c;
if (flags & HAS_SSE2) vpx_sub_pixel_variance32x64 = vpx_sub_pixel_variance32x64_sse2;
if (flags & HAS_SSSE3) vpx_sub_pixel_variance32x64 = vpx_sub_pixel_variance32x64_ssse3;
vpx_sub_pixel_variance4x4 = vpx_sub_pixel_variance4x4_c;
if (flags & HAS_SSE2) vpx_sub_pixel_variance4x4 = vpx_sub_pixel_variance4x4_sse2;
if (flags & HAS_SSSE3) vpx_sub_pixel_variance4x4 = vpx_sub_pixel_variance4x4_ssse3;
vpx_sub_pixel_variance4x8 = vpx_sub_pixel_variance4x8_c;
if (flags & HAS_SSE2) vpx_sub_pixel_variance4x8 = vpx_sub_pixel_variance4x8_sse2;
if (flags & HAS_SSSE3) vpx_sub_pixel_variance4x8 = vpx_sub_pixel_variance4x8_ssse3;
vpx_sub_pixel_variance64x32 = vpx_sub_pixel_variance64x32_c;
if (flags & HAS_SSE2) vpx_sub_pixel_variance64x32 = vpx_sub_pixel_variance64x32_sse2;
if (flags & HAS_SSSE3) vpx_sub_pixel_variance64x32 = vpx_sub_pixel_variance64x32_ssse3;
vpx_sub_pixel_variance64x64 = vpx_sub_pixel_variance64x64_c;
if (flags & HAS_SSE2) vpx_sub_pixel_variance64x64 = vpx_sub_pixel_variance64x64_sse2;
if (flags & HAS_SSSE3) vpx_sub_pixel_variance64x64 = vpx_sub_pixel_variance64x64_ssse3;
if (flags & HAS_AVX2) vpx_sub_pixel_variance64x64 = vpx_sub_pixel_variance64x64_avx2;
vpx_sub_pixel_variance8x16 = vpx_sub_pixel_variance8x16_c;
if (flags & HAS_SSE2) vpx_sub_pixel_variance8x16 = vpx_sub_pixel_variance8x16_sse2;
if (flags & HAS_SSSE3) vpx_sub_pixel_variance8x16 = vpx_sub_pixel_variance8x16_ssse3;
vpx_sub_pixel_variance8x4 = vpx_sub_pixel_variance8x4_c;
if (flags & HAS_SSE2) vpx_sub_pixel_variance8x4 = vpx_sub_pixel_variance8x4_sse2;
if (flags & HAS_SSSE3) vpx_sub_pixel_variance8x4 = vpx_sub_pixel_variance8x4_ssse3;
vpx_sub_pixel_variance8x8 = vpx_sub_pixel_variance8x8_c;
if (flags & HAS_SSE2) vpx_sub_pixel_variance8x8 = vpx_sub_pixel_variance8x8_sse2;
if (flags & HAS_SSSE3) vpx_sub_pixel_variance8x8 = vpx_sub_pixel_variance8x8_ssse3;
vpx_subtract_block = vpx_subtract_block_c;
if (flags & HAS_SSE2) vpx_subtract_block = vpx_subtract_block_sse2;
if (flags & HAS_AVX2) vpx_subtract_block = vpx_subtract_block_avx2;
vpx_sum_squares_2d_i16 = vpx_sum_squares_2d_i16_c;
if (flags & HAS_SSE2) vpx_sum_squares_2d_i16 = vpx_sum_squares_2d_i16_sse2;
/* --- Remaining intra predictors (TM / V) --- */
vpx_tm_predictor_16x16 = vpx_tm_predictor_16x16_c;
if (flags & HAS_SSE2) vpx_tm_predictor_16x16 = vpx_tm_predictor_16x16_sse2;
vpx_tm_predictor_32x32 = vpx_tm_predictor_32x32_c;
if (flags & HAS_SSE2) vpx_tm_predictor_32x32 = vpx_tm_predictor_32x32_sse2;
vpx_tm_predictor_4x4 = vpx_tm_predictor_4x4_c;
if (flags & HAS_SSE2) vpx_tm_predictor_4x4 = vpx_tm_predictor_4x4_sse2;
vpx_tm_predictor_8x8 = vpx_tm_predictor_8x8_c;
if (flags & HAS_SSE2) vpx_tm_predictor_8x8 = vpx_tm_predictor_8x8_sse2;
vpx_v_predictor_16x16 = vpx_v_predictor_16x16_c;
if (flags & HAS_SSE2) vpx_v_predictor_16x16 = vpx_v_predictor_16x16_sse2;
vpx_v_predictor_32x32 = vpx_v_predictor_32x32_c;
if (flags & HAS_SSE2) vpx_v_predictor_32x32 = vpx_v_predictor_32x32_sse2;
vpx_v_predictor_4x4 = vpx_v_predictor_4x4_c;
if (flags & HAS_SSE2) vpx_v_predictor_4x4 = vpx_v_predictor_4x4_sse2;
vpx_v_predictor_8x8 = vpx_v_predictor_8x8_c;
if (flags & HAS_SSE2) vpx_v_predictor_8x8 = vpx_v_predictor_8x8_sse2;
/* --- Whole-block variance --- */
vpx_variance16x16 = vpx_variance16x16_c;
if (flags & HAS_SSE2) vpx_variance16x16 = vpx_variance16x16_sse2;
if (flags & HAS_AVX2) vpx_variance16x16 = vpx_variance16x16_avx2;
vpx_variance16x32 = vpx_variance16x32_c;
if (flags & HAS_SSE2) vpx_variance16x32 = vpx_variance16x32_sse2;
if (flags & HAS_AVX2) vpx_variance16x32 = vpx_variance16x32_avx2;
vpx_variance16x8 = vpx_variance16x8_c;
if (flags & HAS_SSE2) vpx_variance16x8 = vpx_variance16x8_sse2;
if (flags & HAS_AVX2) vpx_variance16x8 = vpx_variance16x8_avx2;
vpx_variance32x16 = vpx_variance32x16_c;
if (flags & HAS_SSE2) vpx_variance32x16 = vpx_variance32x16_sse2;
if (flags & HAS_AVX2) vpx_variance32x16 = vpx_variance32x16_avx2;
vpx_variance32x32 = vpx_variance32x32_c;
if (flags & HAS_SSE2) vpx_variance32x32 = vpx_variance32x32_sse2;
if (flags & HAS_AVX2) vpx_variance32x32 = vpx_variance32x32_avx2;
vpx_variance32x64 = vpx_variance32x64_c;
if (flags & HAS_SSE2) vpx_variance32x64 = vpx_variance32x64_sse2;
if (flags & HAS_AVX2) vpx_variance32x64 = vpx_variance32x64_avx2;
vpx_variance4x4 = vpx_variance4x4_c;
if (flags & HAS_SSE2) vpx_variance4x4 = vpx_variance4x4_sse2;
vpx_variance4x8 = vpx_variance4x8_c;
if (flags & HAS_SSE2) vpx_variance4x8 = vpx_variance4x8_sse2;
vpx_variance64x32 = vpx_variance64x32_c;
if (flags & HAS_SSE2) vpx_variance64x32 = vpx_variance64x32_sse2;
if (flags & HAS_AVX2) vpx_variance64x32 = vpx_variance64x32_avx2;
vpx_variance64x64 = vpx_variance64x64_c;
if (flags & HAS_SSE2) vpx_variance64x64 = vpx_variance64x64_sse2;
if (flags & HAS_AVX2) vpx_variance64x64 = vpx_variance64x64_avx2;
vpx_variance8x16 = vpx_variance8x16_c;
if (flags & HAS_SSE2) vpx_variance8x16 = vpx_variance8x16_sse2;
if (flags & HAS_AVX2) vpx_variance8x16 = vpx_variance8x16_avx2;
vpx_variance8x4 = vpx_variance8x4_c;
if (flags & HAS_SSE2) vpx_variance8x4 = vpx_variance8x4_sse2;
if (flags & HAS_AVX2) vpx_variance8x4 = vpx_variance8x4_avx2;
vpx_variance8x8 = vpx_variance8x8_c;
if (flags & HAS_SSE2) vpx_variance8x8 = vpx_variance8x8_sse2;
if (flags & HAS_AVX2) vpx_variance8x8 = vpx_variance8x8_avx2;
vpx_vector_var = vpx_vector_var_c;
if (flags & HAS_SSE2) vpx_vector_var = vpx_vector_var_sse2;
}