The distribution of file sizes (measured in lines of code):
File | # lines | # units |
---|---|---|
__init__.pyi in stubs/torch | 2344 | 419 |
spmm.cu in xformers/components/attention/csrc/cuda | 775 | - |
sddmm2_cuda.cu in xformers/components/attention/csrc/cuda | 576 | - |
run_tasks.py in xformers/benchmarks/LRA | 467 | 10 |
csr_tensor.py in xformers/sparse | 365 | 23 |
sddmm.cu in xformers/components/attention/csrc/cuda | 351 | - |
benchmark_encoder.py in xformers/benchmarks | 314 | 7 |
benchmark_vit_timm.py in xformers/benchmarks | 311 | 13 |
block_factory.py in xformers/factory | 294 | 18 |
sparse_softmax.cu in xformers/components/attention/csrc/cuda | 268 | - |
triton_v2_ragged_qk_dotprod.py in experimental/ragged_inference | 252 | 7 |
__init__.pyi in stubs/torch/nn | 238 | 50 |
k_layer_norm.py in xformers/triton | 225 | 10 |
benchmark_core.py in xformers/benchmarks | 225 | 4 |
compositional.py in xformers/components/attention | 220 | 4 |
ortho.py in xformers/components/attention | 199 | 6 |
nystrom.py in xformers/components/attention | 194 | 4 |
benchmark_pytorch_transformer.py in xformers/benchmarks | 183 | 9 |
sparse_softmax.cpp in xformers/components/attention/csrc/cpu | 168 | 4 |
triton_v2_qk_dotprod.py in experimental/ragged_inference | 159 | 6 |
core.py in xformers/components/attention | 154 | 11 |
model_factory.py in xformers/factory | 149 | 6 |
dropout.py in xformers/triton | 148 | 5 |
attention_patterns.py in xformers/components/attention | 148 | 21 |
model_wrapper.py in xformers/benchmarks/LRA/code | 147 | 12 |
softmax.py in xformers/components/attention/feature_maps | 146 | 11 |
triton_v2_matmul.py in experimental/ragged_inference | 144 | 6 |
k_dropout.py in xformers/triton | 141 | 4 |
multi_head_dispatch.py in xformers/components | 141 | 6 |
in_proj_container.py in xformers/components | 131 | 5 |
blocksparse.py in xformers/components/attention | 129 | 3 |
computeUtil.h in xformers/components/attention/csrc | 122 | 14 |
_csr_ops.py in xformers/sparse | 121 | 9 |
seq_kv_cache.py in experimental/ragged_inference | 119 | 14 |
run_grid_search.py in xformers/benchmarks/LRA | 116 | 2 |
softmax.py in xformers/triton | 114 | 5 |
setup.py in root | 113 | 4 |
favor.py in xformers/components/attention | 113 | 4 |
run_with_submitit.py in xformers/benchmarks/LRA | 110 | 8 |
benchmark_triton_fused_linear.py in xformers/benchmarks | 110 | 3 |
reversible.py in xformers/components | 100 | 10 |
__init__.pyi in stubs/numpy | 99 | 22 |
k_fused_matmul_fw.py in xformers/triton | 98 | 3 |
k_softmax.py in xformers/triton | 98 | 3 |
benchmark_triton_blocksparse.py in xformers/benchmarks | 96 | 1 |
garbage_pad_ragged_acts.py in experimental/ragged_inference | 93 | 11 |
spmm.cpp in xformers/components/attention/csrc/cpu | 90 | 2 |
_sputnik_sparse.py in xformers/components/attention | 88 | 23 |
fused_linear_layer.py in xformers/triton | 87 | 5 |
sddmm.cpp in xformers/components/attention/csrc/cpu | 87 | 2 |
The same files, sorted by the number of units (descending):

File | # lines | # units |
---|---|---|
__init__.pyi in stubs/torch | 2344 | 419 |
__init__.pyi in stubs/torch/nn | 238 | 50 |
_sputnik_sparse.py in xformers/components/attention | 88 | 23 |
csr_tensor.py in xformers/sparse | 365 | 23 |
__init__.pyi in stubs/numpy | 99 | 22 |
attention_patterns.py in xformers/components/attention | 148 | 21 |
block_factory.py in xformers/factory | 294 | 18 |
seq_kv_cache.py in experimental/ragged_inference | 119 | 14 |
computeUtil.h in xformers/components/attention/csrc | 122 | 14 |
attention_mask.py in xformers/components/attention | 84 | 14 |
benchmark_vit_timm.py in xformers/benchmarks | 311 | 13 |
k_activations.py in xformers/triton | 72 | 12 |
model_wrapper.py in xformers/benchmarks/LRA/code | 147 | 12 |
garbage_pad_ragged_acts.py in experimental/ragged_inference | 93 | 11 |
core.py in xformers/components/attention | 154 | 11 |
softmax.py in xformers/components/attention/feature_maps | 146 | 11 |
k_layer_norm.py in xformers/triton | 225 | 10 |
reversible.py in xformers/components | 100 | 10 |
run_tasks.py in xformers/benchmarks/LRA | 467 | 10 |
utils.py in xformers/sparse | 70 | 10 |
There are 4 files with lines longer than 120 characters. In total, there are 6 long lines.
File | # lines | # units | # long lines |
---|---|---|---|
sddmm2_cuda.cu in xformers/components/attention/csrc/cuda | 576 | - | 2 |
sparse_softmax.cpp in xformers/components/attention/csrc | 8 | - | 2 |
sddmm.cpp in xformers/components/attention/csrc | 6 | - | 1 |
spmm.cpp in xformers/components/attention/csrc | 6 | - | 1 |