A temporal dependency occurs when developers change two or more files at the same time (i.e., they are part of the same commit).
Pairs | # same commits | # commits 1 | # commits 2 | latest commit |
---|---|---|---|---|
packages/tasks/src/model-libraries-snippets.ts packages/tasks/src/model-libraries.ts | 5 | 103 (4%) | 151 (3%) | 2025-06-25 |
packages/inference/src/snippets/getInferenceSnippets.ts packages/tasks-gen/scripts/generate-snippets-fixtures.ts | 4 | 20 (20%) | 22 (18%) | 2025-06-10 |
packages/inference/src/lib/makeRequestOptions.ts packages/inference/src/lib/getInferenceProviderMapping.ts | 3 | 53 (5%) | 11 (27%) | 2025-06-18 |
packages/tasks-gen/snippets-fixtures/conversational-vlm-non-stream/python/openai/0.fireworks-ai.py packages/tasks-gen/snippets-fixtures/conversational-llm-non-stream/python/requests/0.together.py | 2 | 7 (28%) | 6 (33%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/conversational-vlm-non-stream/python/requests/0.fireworks-ai.py packages/tasks-gen/snippets-fixtures/conversational-llm-stream/python/openai/0.together.py | 2 | 6 (33%) | 7 (28%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/text-to-speech/js/fetch/0.fal-ai.js packages/tasks-gen/snippets-fixtures/conversational-llm-stream/python/requests/0.together.py | 2 | 3 (66%) | 6 (33%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/conversational-vlm-non-stream/js/openai/0.fireworks-ai.js packages/tasks-gen/snippets-fixtures/conversational-llm-non-stream/js/openai/0.together.js | 2 | 6 (33%) | 6 (33%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/conversational-vlm-stream/js/openai/0.fireworks-ai.js packages/tasks-gen/snippets-fixtures/conversational-vlm-non-stream/js/openai/0.fireworks-ai.js | 2 | 6 (33%) | 6 (33%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/conversational-vlm-stream/js/huggingface.js/0.fireworks-ai.js packages/tasks-gen/snippets-fixtures/conversational-vlm-non-stream/js/huggingface.js/0.fireworks-ai.js | 2 | 7 (28%) | 6 (33%) | 2025-06-17 |
packages/tasks-gen/snippets-fixtures/conversational-vlm-stream/python/openai/0.fireworks-ai.py packages/tasks-gen/snippets-fixtures/conversational-vlm-non-stream/python/requests/0.fireworks-ai.py | 2 | 7 (28%) | 6 (33%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/text-to-image/js/fetch/0.fal-ai.js packages/tasks-gen/snippets-fixtures/conversational-llm-non-stream/python/openai/0.together.py | 2 | 5 (40%) | 7 (28%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/text-to-image--lora/js/fetch/0.fal-ai.js packages/tasks-gen/snippets-fixtures/conversational-llm-non-stream/python/openai/0.together.py | 2 | 3 (66%) | 7 (28%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/conversational-vlm-non-stream/js/openai/0.fireworks-ai.js packages/tasks-gen/snippets-fixtures/conversational-llm-stream/python/openai/0.together.py | 2 | 6 (33%) | 7 (28%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/text-to-speech/python/requests/0.fal-ai.py packages/tasks-gen/snippets-fixtures/conversational-vlm-stream/js/openai/0.fireworks-ai.js | 2 | 3 (66%) | 6 (33%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/conversational-vlm-non-stream/python/requests/0.fireworks-ai.py packages/tasks-gen/snippets-fixtures/conversational-llm-stream/python/requests/0.together.py | 2 | 6 (33%) | 6 (33%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/conversational-vlm-stream/js/huggingface.js/0.fireworks-ai.js packages/inference/src/snippets/getInferenceSnippets.ts | 2 | 7 (28%) | 20 (10%) | 2025-06-17 |
packages/tasks-gen/snippets-fixtures/conversational-llm-stream/python/openai/0.together.py packages/tasks-gen/snippets-fixtures/conversational-llm-non-stream/js/openai/0.together.js | 2 | 7 (28%) | 6 (33%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/text-to-speech/js/fetch/0.fal-ai.js packages/tasks-gen/snippets-fixtures/conversational-llm-non-stream/python/openai/0.together.py | 2 | 3 (66%) | 7 (28%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/conversational-llm-non-stream/python/requests/0.together.py packages/tasks-gen/snippets-fixtures/conversational-llm-non-stream/python/openai/0.together.py | 2 | 6 (33%) | 7 (28%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/conversational-vlm-stream/python/openai/0.fireworks-ai.py packages/tasks-gen/snippets-fixtures/conversational-llm-stream/python/requests/0.together.py | 2 | 7 (28%) | 6 (33%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/conversational-vlm-non-stream/js/openai/0.fireworks-ai.js packages/tasks-gen/snippets-fixtures/conversational-llm-non-stream/python/requests/0.together.py | 2 | 6 (33%) | 6 (33%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/conversational-llm-stream/python/openai/0.together.py packages/tasks-gen/snippets-fixtures/conversational-llm-non-stream/python/requests/0.together.py | 2 | 7 (28%) | 6 (33%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/text-to-image--lora/js/fetch/0.fal-ai.js packages/tasks-gen/snippets-fixtures/conversational-vlm-non-stream/python/openai/0.fireworks-ai.py | 2 | 3 (66%) | 7 (28%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/conversational-vlm-non-stream/python/openai/0.fireworks-ai.py packages/tasks-gen/snippets-fixtures/conversational-llm-non-stream/python/openai/0.together.py | 2 | 7 (28%) | 7 (28%) | 2025-06-04 |
packages/inference/src/providers/replicate.ts packages/inference/src/lib/getProviderHelper.ts | 2 | 17 (11%) | 13 (15%) | 2025-06-27 |
packages/tasks-gen/snippets-fixtures/text-to-image--lora/js/fetch/0.fal-ai.js packages/tasks-gen/snippets-fixtures/conversational-vlm-stream/python/openai/0.fireworks-ai.py | 2 | 3 (66%) | 7 (28%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/conversational-vlm-non-stream/js/openai/0.fireworks-ai.js packages/tasks-gen/snippets-fixtures/conversational-llm-stream/js/openai/0.together.js | 2 | 6 (33%) | 6 (33%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/conversational-vlm-non-stream/python/requests/0.fireworks-ai.py packages/inference/src/snippets/getInferenceSnippets.ts | 2 | 6 (33%) | 20 (10%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/text-to-speech/js/fetch/0.fal-ai.js packages/tasks-gen/snippets-fixtures/conversational-vlm-stream/python/openai/0.fireworks-ai.py | 2 | 3 (66%) | 7 (28%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/text-to-image/js/fetch/0.fal-ai.js packages/tasks-gen/snippets-fixtures/conversational-llm-stream/js/openai/0.together.js | 2 | 5 (40%) | 6 (33%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/conversational-llm-stream/js/openai/0.together.js packages/inference/src/snippets/getInferenceSnippets.ts | 2 | 6 (33%) | 20 (10%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/text-to-image/js/fetch/0.fal-ai.js packages/tasks-gen/snippets-fixtures/conversational-vlm-stream/js/openai/0.fireworks-ai.js | 2 | 5 (40%) | 6 (33%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/conversational-vlm-non-stream/js/openai/0.fireworks-ai.js packages/inference/src/snippets/getInferenceSnippets.ts | 2 | 6 (33%) | 20 (10%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/conversational-vlm-non-stream/python/openai/0.fireworks-ai.py packages/inference/src/snippets/getInferenceSnippets.ts | 2 | 7 (28%) | 20 (10%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/text-to-speech/python/requests/0.fal-ai.py packages/tasks-gen/snippets-fixtures/text-to-speech/js/fetch/0.fal-ai.js | 2 | 3 (66%) | 3 (66%) | 2025-06-04 |
packages/inference/src/types.ts packages/inference/src/providers/consts.ts | 2 | 42 (4%) | 19 (10%) | 2025-06-18 |
packages/tasks-gen/snippets-fixtures/text-to-speech/python/requests/0.fal-ai.py packages/tasks-gen/snippets-fixtures/conversational-llm-stream/js/openai/0.together.js | 2 | 3 (66%) | 6 (33%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/conversational-vlm-stream/python/openai/0.fireworks-ai.py packages/inference/src/snippets/getInferenceSnippets.ts | 2 | 7 (28%) | 20 (10%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/text-to-speech/js/fetch/0.fal-ai.js packages/tasks-gen/snippets-fixtures/conversational-vlm-non-stream/python/openai/0.fireworks-ai.py | 2 | 3 (66%) | 7 (28%) | 2025-06-04 |
packages/inference/src/providers/consts.ts packages/inference/src/lib/makeRequestOptions.ts | 2 | 19 (10%) | 53 (3%) | 2025-06-18 |
packages/tasks-gen/snippets-fixtures/conversational-llm-non-stream/python/openai/0.together.py packages/tasks-gen/snippets-fixtures/conversational-llm-non-stream/js/openai/0.together.js | 2 | 7 (28%) | 6 (33%) | 2025-06-04 |
packages/inference/src/types.ts packages/inference/src/lib/getInferenceProviderMapping.ts | 2 | 42 (4%) | 11 (18%) | 2025-06-18 |
packages/tasks-gen/snippets-fixtures/conversational-vlm-non-stream/python/huggingface_hub/0.fireworks-ai.py packages/tasks-gen/snippets-fixtures/conversational-vlm-non-stream/js/huggingface.js/0.fireworks-ai.js | 2 | 6 (33%) | 6 (33%) | 2025-06-17 |
packages/tasks-gen/snippets-fixtures/text-to-image--lora/js/fetch/0.fal-ai.js packages/tasks-gen/snippets-fixtures/conversational-llm-non-stream/python/requests/0.together.py | 2 | 3 (66%) | 6 (33%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/text-to-image--lora/js/fetch/0.fal-ai.js packages/tasks-gen/snippets-fixtures/conversational-llm-non-stream/js/openai/0.together.js | 2 | 3 (66%) | 6 (33%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/conversational-llm-stream/js/openai/0.together.js packages/tasks-gen/snippets-fixtures/conversational-llm-non-stream/python/requests/0.together.py | 2 | 6 (33%) | 6 (33%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/conversational-vlm-stream/python/openai/0.fireworks-ai.py packages/tasks-gen/snippets-fixtures/conversational-llm-non-stream/python/openai/0.together.py | 2 | 7 (28%) | 7 (28%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/text-to-speech/python/requests/0.fal-ai.py packages/tasks-gen/snippets-fixtures/conversational-llm-non-stream/python/openai/0.together.py | 2 | 3 (66%) | 7 (28%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/text-to-speech/python/requests/0.fal-ai.py packages/tasks-gen/snippets-fixtures/conversational-vlm-non-stream/python/requests/0.fireworks-ai.py | 2 | 3 (66%) | 6 (33%) | 2025-06-04 |
packages/tasks-gen/snippets-fixtures/conversational-vlm-stream/python/requests/0.fireworks-ai.py packages/tasks-gen/snippets-fixtures/conversational-llm-non-stream/python/openai/0.together.py | 2 | 6 (33%) | 7 (28%) | 2025-06-04 |