2 files changed: +8 −8 lines changed

File 1 of 2

@@ -58,8 +58,8 @@ https://github.com/oobabooga/exllamav2/releases/download/v0.1.7/exllamav2-0.1.7+
 https://github.com/oobabooga/exllamav2/releases/download/v0.1.7/exllamav2-0.1.7+cu121.torch2.2.2-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
 https://github.com/oobabooga/exllamav2/releases/download/v0.1.7/exllamav2-0.1.7+cu121.torch2.2.2-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
 https://github.com/oobabooga/exllamav2/releases/download/v0.1.7/exllamav2-0.1.7-py3-none-any.whl; platform_system == "Linux" and platform_machine != "x86_64"
-https://github.com/oobabooga/flash-attention/releases/download/v2.5.9.post1/flash_attn-2.5.9.post1+cu122torch2.2.2cxx11abiFALSE-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
-https://github.com/oobabooga/flash-attention/releases/download/v2.5.9.post1/flash_attn-2.5.9.post1+cu122torch2.2.2cxx11abiFALSE-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
-https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.9.post1/flash_attn-2.5.9.post1+cu122torch2.2cxx11abiFALSE-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
-https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.9.post1/flash_attn-2.5.9.post1+cu122torch2.2cxx11abiFALSE-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
+https://github.com/oobabooga/flash-attention/releases/download/v2.6.1/flash_attn-2.6.1+cu122torch2.2.2cxx11abiFALSE-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
+https://github.com/oobabooga/flash-attention/releases/download/v2.6.1/flash_attn-2.6.1+cu122torch2.2.2cxx11abiFALSE-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
+https://github.com/Dao-AILab/flash-attention/releases/download/v2.6.1/flash_attn-2.6.1+cu123torch2.2cxx11abiFALSE-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
+https://github.com/Dao-AILab/flash-attention/releases/download/v2.6.1/flash_attn-2.6.1+cu123torch2.2cxx11abiFALSE-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
 autoawq==0.2.5; platform_system == "Linux" or platform_system == "Windows"
File 2 of 2

@@ -58,8 +58,8 @@ https://github.com/oobabooga/exllamav2/releases/download/v0.1.7/exllamav2-0.1.7+
 https://github.com/oobabooga/exllamav2/releases/download/v0.1.7/exllamav2-0.1.7+cu121.torch2.2.2-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
 https://github.com/oobabooga/exllamav2/releases/download/v0.1.7/exllamav2-0.1.7+cu121.torch2.2.2-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
 https://github.com/oobabooga/exllamav2/releases/download/v0.1.7/exllamav2-0.1.7-py3-none-any.whl; platform_system == "Linux" and platform_machine != "x86_64"
-https://github.com/oobabooga/flash-attention/releases/download/v2.5.9.post1/flash_attn-2.5.9.post1+cu122torch2.2.2cxx11abiFALSE-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
-https://github.com/oobabooga/flash-attention/releases/download/v2.5.9.post1/flash_attn-2.5.9.post1+cu122torch2.2.2cxx11abiFALSE-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
-https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.9.post1/flash_attn-2.5.9.post1+cu122torch2.2cxx11abiFALSE-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
-https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.9.post1/flash_attn-2.5.9.post1+cu122torch2.2cxx11abiFALSE-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
+https://github.com/oobabooga/flash-attention/releases/download/v2.6.1/flash_attn-2.6.1+cu122torch2.2.2cxx11abiFALSE-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
+https://github.com/oobabooga/flash-attention/releases/download/v2.6.1/flash_attn-2.6.1+cu122torch2.2.2cxx11abiFALSE-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
+https://github.com/Dao-AILab/flash-attention/releases/download/v2.6.1/flash_attn-2.6.1+cu123torch2.2cxx11abiFALSE-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
+https://github.com/Dao-AILab/flash-attention/releases/download/v2.6.1/flash_attn-2.6.1+cu123torch2.2cxx11abiFALSE-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
 autoawq==0.2.5; platform_system == "Linux" or platform_system == "Windows"
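
The requirement lines above pin platform-specific flash-attn wheels using PEP 508 environment markers, so pip only installs the wheel whose marker matches the host OS, CPU architecture, and Python version. A minimal sketch (not part of the diff, assuming the third-party packaging library and a hypothetical wheel_markers mapping) that evaluates those markers to show which wheel would be selected on the current machine:

# pip install packaging
from packaging.markers import Marker, default_environment

# Marker strings copied from the flash-attn requirement lines in this diff.
wheel_markers = {
    "win_amd64 / cp311": 'platform_system == "Windows" and python_version == "3.11"',
    "win_amd64 / cp310": 'platform_system == "Windows" and python_version == "3.10"',
    "linux_x86_64 / cp311": 'platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"',
    "linux_x86_64 / cp310": 'platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"',
}

if __name__ == "__main__":
    # Marker values pip would use for the current interpreter and OS.
    env = default_environment()
    print(f"platform_system={env['platform_system']}, "
          f"platform_machine={env['platform_machine']}, "
          f"python_version={env['python_version']}")
    for label, marker in wheel_markers.items():
        if Marker(marker).evaluate():
            print(f"matching wheel: {label}")

On any host not covered by these markers (for example Python 3.12, or macOS), none of the markers evaluate to true and pip skips the prebuilt flash-attn wheels entirely.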