
Commit 82c6c7b

update flash to 2.5.7
ghstack-source-id: 80926e210412d5e00ff714ecb208386b15ab2029
Pull Request resolved: fairinternal/xformers#1084

__original_commit__ = fairinternal/xformers@689b03b
bottler authored and xFormers Bot committed May 2, 2024
1 parent fd227d3 commit 82c6c7b
Showing 2 changed files with 4 additions and 3 deletions.
5 changes: 3 additions & 2 deletions xformers/ops/fmha/flash.py
@@ -49,8 +49,8 @@
from flash_attn.flash_attn_interface import flash_attn_cuda as _C_flashattention

FLASH_VERSION = flash_attn.__version__
-FLASH_VER_MIN = (2, 5, 2)
-FLASH_VER_LAST = (2, 5, 6) # last supported, inclusive
+FLASH_VER_MIN = (2, 5, 7)
+FLASH_VER_LAST = (2, 5, 7) # last supported, inclusive
flash_ver_parsed = tuple(int(s) for s in FLASH_VERSION.split(".")[:3])
if (
flash_ver_parsed < FLASH_VER_MIN or flash_ver_parsed > FLASH_VER_LAST
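
For context, the gate this hunk tightens now accepts exactly one flash-attn release: FLASH_VER_MIN and FLASH_VER_LAST both become (2, 5, 7). Below is a minimal sketch of the check as it reads after the change; the variable names come from the diff above, while the ImportError body is illustrative, since the handling inside the if is truncated in this view.

import flash_attn

FLASH_VERSION = flash_attn.__version__
FLASH_VER_MIN = (2, 5, 7)
FLASH_VER_LAST = (2, 5, 7)  # last supported, inclusive

# "2.5.7" -> (2, 5, 7); [:3] drops any trailing components such as ".post1"
flash_ver_parsed = tuple(int(s) for s in FLASH_VERSION.split(".")[:3])
if (
    flash_ver_parsed < FLASH_VER_MIN or flash_ver_parsed > FLASH_VER_LAST
):
    raise ImportError(  # illustrative: the real handling is not shown here
        f"Requires flash_attn between {FLASH_VER_MIN} and {FLASH_VER_LAST} "
        f"inclusive, found {FLASH_VERSION}"
    )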
@@ -145,6 +145,7 @@ def _flash_fwd(
cu_seq_lens_q,
cu_seq_lens_k,
seqused_k,
+None, # block_table
None, # alibi_slopes
max_seq_len_q,
max_seq_len_k,
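
This hunk keeps the positional arguments of the varlen forward call aligned with flash-attn 2.5.7, whose C interface takes an extra block_table argument (used by flash-attn's paged KV-cache support) between seqused_k and alibi_slopes; xformers does not use it, so the call site passes None. A minimal illustrative sketch of keeping such a call version-compatible follows; the helper name and argument assembly are hypothetical, not xformers code.

def build_varlen_fwd_args(args_through_seqused_k, flash_ver_parsed):
    # args_through_seqused_k: positional args up to and including seqused_k
    args = list(args_through_seqused_k)
    if flash_ver_parsed >= (2, 5, 7):
        args.append(None)  # block_table: None means no paged-KV block table
    args.append(None)  # alibi_slopes, also unused here
    return args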