26 duplicated lines in:
- src/fairseq/fairseq/model_parallel/modules/multihead_attention.py (108:138, 10%)
- src/fairseq/fairseq/modules/multihead_attention.py (177:207, 7%)

17 duplicated lines in:
- src/fairseq/fairseq/model_parallel/modules/multihead_attention.py (295:313, 7%)
- src/fairseq/fairseq/modules/multihead_attention.py (423:441, 4%)

16 duplicated lines in:
- src/fairseq/fairseq/model_parallel/modules/multihead_attention.py (247:267, 6%)
- src/fairseq/fairseq/modules/multihead_attention.py (370:390, 4%)

10 duplicated lines in:
- src/fairseq/fairseq/model_parallel/modules/transformer_sentence_encoder.py (43:52, 25%)
- src/fairseq/fairseq/modules/transformer_sentence_encoder.py (194:203, 5%)

10 duplicated lines in:
- src/fairseq/fairseq/model_parallel/modules/transformer_sentence_encoder_layer.py (61:70, 18%)
- src/fairseq/fairseq/modules/transformer_sentence_encoder_layer.py (114:123, 10%)

10 duplicated lines in:
- src/fairseq/fairseq/model_parallel/modules/multihead_attention.py (193:207, 4%)
- src/fairseq/fairseq/modules/multihead_attention.py (278:292, 2%)

9 duplicated lines in:
- src/fairseq/fairseq/model_parallel/modules/multihead_attention.py (174:182, 3%)
- src/fairseq/fairseq/modules/multihead_attention.py (259:267, 2%)

9 duplicated lines in:
- src/fairseq/fairseq/model_parallel/modules/multihead_attention.py (64:74, 3%)
- src/fairseq/fairseq/modules/multihead_attention.py (47:57, 2%)

9 duplicated lines in:
- src/fairseq/fairseq/model_parallel/modules/multihead_attention.py (64:74, 3%)
- src/fairseq/fairseq/modules/longformer_multihead_attention.py (60:70, 2%)

8 duplicated lines in:
- src/fairseq/fairseq/model_parallel/modules/transformer_sentence_encoder.py (56:63, 20%)
- src/fairseq/fairseq/modules/transformer_sentence_encoder.py (208:215, 4%)

8 duplicated lines in:
- src/fairseq/fairseq/model_parallel/modules/multihead_attention.py (34:41, 3%)
- src/fairseq/fairseq/modules/multihead_attention.py (25:32, 2%)

8 duplicated lines in:
- src/fairseq/fairseq/model_parallel/modules/multihead_attention.py (165:172, 3%)
- src/fairseq/fairseq/modules/multihead_attention.py (250:257, 2%)

8 duplicated lines in:
- src/fairseq/fairseq/model_parallel/modules/multihead_attention.py (34:41, 3%)
- src/fairseq/fairseq/modules/longformer_multihead_attention.py (37:44, 2%)

7 duplicated lines in:
- src/fairseq/fairseq/model_parallel/modules/multihead_attention.py (83:89, 2%)
- src/fairseq/fairseq/modules/multihead_attention.py (106:112, 1%)

7 duplicated lines in:
- src/fairseq/fairseq/model_parallel/modules/multihead_attention.py (83:89, 2%)
- src/fairseq/fairseq/modules/longformer_multihead_attention.py (145:151, 2%)

6 duplicated lines in:
- src/fairseq/fairseq/model_parallel/modules/multihead_attention.py (184:189, 2%)
- src/fairseq/fairseq/modules/multihead_attention.py (269:274, 1%)

6 duplicated lines in:
- src/fairseq/fairseq/model_parallel/modules/multihead_attention.py (279:284, 2%)
- src/fairseq/fairseq/modules/multihead_attention.py (403:408, 1%)

6 duplicated lines in:
- src/fairseq/fairseq/model_parallel/modules/multihead_attention.py (156:163, 2%)
- src/fairseq/fairseq/modules/multihead_attention.py (241:248, 1%)
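Each entry above pairs a file path with a `(start:end)` line span and the share of that file covered by the clone; for example, the first entry reports 26 duplicated lines inside the 31-line span 108:138 of the model-parallel attention module, which is about 10% of that file. Below is a minimal sketch of how such entries could be parsed into structured records for further analysis. The `DuplicateSpan` dataclass and `parse_entry` helper are hypothetical names introduced here for illustration and assume the exact `path (start:end, pct%)` format shown in the listing; they are not part of fairseq or the duplication tool.

```python
import re
from dataclasses import dataclass

# Matches report entries of the form "- path/to/file.py (start:end, pct%)",
# as they appear in the listing above (hypothetical helper, assumed format).
ENTRY_RE = re.compile(
    r"^-?\s*(?P<path>\S+\.py)\s+\((?P<start>\d+):(?P<end>\d+),\s*(?P<pct>\d+)%\)$"
)

@dataclass
class DuplicateSpan:
    path: str
    start: int    # first line of the duplicated span in this file
    end: int      # last line of the duplicated span in this file
    percent: int  # share of the file covered by this span, as reported

def parse_entry(line: str) -> DuplicateSpan:
    m = ENTRY_RE.match(line.strip())
    if m is None:
        raise ValueError(f"unrecognised report entry: {line!r}")
    return DuplicateSpan(
        path=m.group("path"),
        start=int(m.group("start")),
        end=int(m.group("end")),
        percent=int(m.group("pct")),
    )

# Example: the largest clone reported above.
span = parse_entry(
    "- src/fairseq/fairseq/model_parallel/modules/multihead_attention.py (108:138, 10%)"
)
assert span.end - span.start + 1 == 31  # 31-line span containing the 26 duplicated lines
```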