File age measurements show the distribution of file ages (days since the first commit) and the recency of file updates (days since the latest commit).
File | # lines | # units | last modified (days ago) | created (days ago) | # changes |
---|---|---|---|---|---|
softmax.py in xformers/triton | 114 | 5 | 43 | 99 | 6 |
block_factory.py in xformers/factory | 294 | 18 | 21 | 98 | 8 |
residual.py in xformers/components | 47 | 7 | 68 | 98 | 4 |
__init__.py in xformers/components | 40 | 1 | 6 | 98 | 7 |
k_layer_norm.py in xformers/triton | 225 | 10 | 88 | 97 | 2 |
nystrom.py in xformers/components/attention | 194 | 4 | 70 | 97 | 5 |
attention_patterns.py in xformers/components/attention | 148 | 21 | 97 | 97 | 1 |
blocksparse.py in xformers/components/attention | 129 | 3 | 50 | 97 | 8 |
scaled_dot_product.py in xformers/components/attention | 81 | 2 | 58 | 97 | 7 |
layer_norm.py in xformers/triton | 45 | 3 | 75 | 97 | 5 |
benchmark_encoder.py in xformers/benchmarks | 314 | 7 | 95 | 95 | 1 |
utils.py in xformers/triton | 29 | 3 | 56 | 95 | 2 |
multi_head_dispatch.py in xformers/components | 141 | 6 | 6 | 93 | 4 |
in_proj_container.py in xformers/components | 131 | 5 | 12 | 93 | 2 |
batch_fetch_results.py in xformers/benchmarks/LRA | 81 | - | 92 | 92 | 1 |
utils.py in xformers/components/attention | 70 | 5 | 75 | 92 | 4 |
base.py in xformers/components/attention | 42 | 4 | 6 | 92 | 5 |
__init__.pyi in stubs/torch | 2344 | 419 | 68 | 90 | 3 |
run_tasks.py in xformers/benchmarks/LRA | 467 | 10 | 90 | 90 | 1 |
__init__.pyi in stubs/torch/nn | 238 | 50 | 68 | 90 | 2 |
File | # lines | # units | last modified (days ago) | created (days ago) | # changes |
---|---|---|---|---|---|
attention_patterns.py in xformers/components/attention | 148 | 21 | 97 | 97 | 1 |
benchmark_encoder.py in xformers/benchmarks | 314 | 7 | 95 | 95 | 1 |
batch_fetch_results.py in xformers/benchmarks/LRA | 81 | - | 92 | 92 | 1 |
k_softmax.py in xformers/triton | 98 | 3 | 90 | 90 | 1 |
run_tasks.py in xformers/benchmarks/LRA | 467 | 10 | 90 | 90 | 1 |
k_layer_norm.py in xformers/triton | 225 | 10 | 88 | 97 | 2 |
benchmark_revnet.py in xformers/benchmarks | 61 | 1 | 86 | 86 | 1 |
sine.py in xformers/components/positional_embedding | 40 | 2 | 84 | 84 | 1 |
__init__.py in xformers/components/positional_embedding | 41 | 1 | 84 | 90 | 2 |
k_fused_matmul_fw.py in xformers/triton | 98 | 3 | 78 | 78 | 1 |
benchmark_triton_fused_linear.py in xformers/benchmarks | 110 | 3 | 78 | 78 | 1 |
pkg_helpers.bash in packaging | 64 | - | 77 | 77 | 1 |
setup.py in root | 113 | 4 | 77 | 77 | 1 |
fused_mlp.py in xformers/components/feedforward | 42 | 2 | 75 | 75 | 1 |
layer_norm.py in xformers/triton | 45 | 3 | 75 | 97 | 5 |
utils.py in xformers/components/attention | 70 | 5 | 75 | 92 | 4 |
nystrom.py in xformers/components/attention | 194 | 4 | 70 | 97 | 5 |
onnx.pyi in stubs/torch | 1 | - | 68 | 90 | 2 |
__init__.pyi in stubs/torch/profiler | 2 | 1 | 68 | 90 | 2 |
model_zoo.pyi in stubs/torch/utils | 2 | - | 68 | 90 | 2 |
File | # lines | # units | last modified (days ago) | created (days ago) | # changes |
---|---|---|---|---|---|
spmm.cu in xformers/components/attention/csrc/cuda | 775 | - | | | |
sddmm2_cuda.cu in xformers/components/attention/csrc/cuda | 576 | - | | | |
sddmm.cu in xformers/components/attention/csrc/cuda | 351 | - | | | |
sparse_softmax.cu in xformers/components/attention/csrc/cuda | 268 | - | | | |
benchmark_core.py in xformers/benchmarks | 225 | 4 | | | |
sparse_softmax.cpp in xformers/components/attention/csrc/cpu | 168 | 4 | | | |
softmax.py in xformers/components/attention/feature_maps | 146 | 11 | | | |
computeUtil.h in xformers/components/attention/csrc | 122 | 14 | | | |
run_grid_search.py in xformers/benchmarks/LRA | 116 | 2 | | | |
benchmark_triton_blocksparse.py in xformers/benchmarks | 96 | 1 | | | |
spmm.cpp in xformers/components/attention/csrc/cpu | 90 | 2 | | | |
sddmm.cpp in xformers/components/attention/csrc/cpu | 87 | 2 | | | |
matmul.cu in xformers/components/attention/csrc/cuda | 86 | - | | | |
benchmark_sddmm.py in xformers/benchmarks | 85 | 2 | | | |
benchmark_nystrom_utils.py in xformers/benchmarks | 76 | 2 | | | |
matmul.cpp in xformers/components/attention/csrc/cpu | 73 | 2 | | | |
utils.py in xformers | 61 | 6 | | | |
benchmark_triton_layernorm.py in xformers/benchmarks | 61 | 2 | | | |
benchmark_triton_softmax.py in xformers/benchmarks | 61 | 8 | | | |
matmul.cpp in xformers/components/attention/csrc/autograd | 48 | 3 | | | |
File | # lines | # units | last modified (days ago) | created (days ago) | # changes |
---|---|---|---|---|---|
spmm.cu in xformers/components/attention/csrc/cuda | 775 | - | | | |
sddmm2_cuda.cu in xformers/components/attention/csrc/cuda | 576 | - | | | |
sddmm.cu in xformers/components/attention/csrc/cuda | 351 | - | | | |
sparse_softmax.cu in xformers/components/attention/csrc/cuda | 268 | - | | | |
benchmark_core.py in xformers/benchmarks | 225 | 4 | | | |
sparse_softmax.cpp in xformers/components/attention/csrc/cpu | 168 | 4 | | | |
softmax.py in xformers/components/attention/feature_maps | 146 | 11 | | | |
computeUtil.h in xformers/components/attention/csrc | 122 | 14 | | | |
run_grid_search.py in xformers/benchmarks/LRA | 116 | 2 | | | |
benchmark_triton_blocksparse.py in xformers/benchmarks | 96 | 1 | | | |
spmm.cpp in xformers/components/attention/csrc/cpu | 90 | 2 | | | |
sddmm.cpp in xformers/components/attention/csrc/cpu | 87 | 2 | | | |
matmul.cu in xformers/components/attention/csrc/cuda | 86 | - | | | |
benchmark_sddmm.py in xformers/benchmarks | 85 | 2 | | | |
benchmark_nystrom_utils.py in xformers/benchmarks | 76 | 2 | | | |
matmul.cpp in xformers/components/attention/csrc/cpu | 73 | 2 | | | |
utils.py in xformers | 61 | 6 | | | |
benchmark_triton_layernorm.py in xformers/benchmarks | 61 | 2 | | | |
benchmark_triton_softmax.py in xformers/benchmarks | 61 | 8 | | | |
matmul.cpp in xformers/components/attention/csrc/autograd | 48 | 3 | | | |