[core] use kernels to support _flash_3_hub attention backend (#12236)
Status: Merged
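Summary: this PR adds a `_flash_3_hub` attention backend that loads a pre-built Flash Attention 3 (FA3) kernel from the Hub repository `kernels-community/flash-attn3` through the `kernels` library, so FA3 can be used without building it from source. Loading is opt-in via the `DIFFUSERS_ENABLE_HUB_KERNELS` environment variable.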
Changes from 16 of 21 commits (all by sayakpaul):

- 827fc15 feat: try loading fa3 using kernels when available.
- a0177eb up
- ac43e84 Merge branch 'main' into fa3-from-kernels
- bc40971 change to Hub.
- 87d0879 up
- 2bb3796 up
- 4e69d42 up
- 595ae6b Merge branch 'main' into fa3-from-kernels
- 548f56e Merge branch 'main' into fa3-from-kernels
- 0e7eac0 Merge branch 'main' into fa3-from-kernels
- 6d5c247 Merge branch 'main' into fa3-from-kernels
- c2a5aff switch env var.
- 0097c57 up
- 943b4a8 up
- 66a6811 up
- 93c3eb9 up
- a1e1faf up
- 25c701d up
- 8dada04 Merge branch 'main' into fa3-from-kernels
- 9168e62 Merge branch 'main' into fa3-from-kernels
- 648c9dc Merge branch 'main' into fa3-from-kernels
File: src/diffusers/models/attention_dispatch.py
```diff
@@ -26,6 +26,7 @@
     is_flash_attn_3_available,
     is_flash_attn_available,
     is_flash_attn_version,
+    is_kernels_available,
     is_sageattention_available,
     is_sageattention_version,
     is_torch_npu_available,
```
```diff
@@ -35,7 +36,7 @@
     is_xformers_available,
     is_xformers_version,
 )
-from ..utils.constants import DIFFUSERS_ATTN_BACKEND, DIFFUSERS_ATTN_CHECKS
+from ..utils.constants import DIFFUSERS_ATTN_BACKEND, DIFFUSERS_ATTN_CHECKS, DIFFUSERS_ENABLE_HUB_KERNELS


 _REQUIRED_FLASH_VERSION = "2.6.3"
```
```diff
@@ -67,6 +68,17 @@
     flash_attn_3_func = None
     flash_attn_3_varlen_func = None

+if DIFFUSERS_ENABLE_HUB_KERNELS:
+    if not is_kernels_available():
+        raise ImportError(
+            "To use FA3 kernel for your hardware from the Hub, the `kernels` library must be installed. Install with `pip install kernels`."
+        )
+    from ..utils.kernels_utils import _get_fa3_from_hub
+
+    flash_attn_interface_hub = _get_fa3_from_hub()
+    flash_attn_3_func_hub = flash_attn_interface_hub.flash_attn_func
+else:
+    flash_attn_3_func_hub = None

 if _CAN_USE_SAGE_ATTN:
     from sageattention import (
```
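Note that this gate is top-level module code, so it runs when the attention module is imported; the environment variable must be set before `diffusers` is imported. An illustrative opt-in sketch (not part of the diff):

```python
# Opt in to the Hub-provided FA3 kernel before importing diffusers; the
# DIFFUSERS_ENABLE_HUB_KERNELS check above is evaluated on module import.
import os

os.environ["DIFFUSERS_ENABLE_HUB_KERNELS"] = "yes"  # accepted truthy values are an assumption

import diffusers  # the attention module now fetches FA3 from the Hub (or raises if `kernels` is missing)
```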
```diff
@@ -129,7 +141,6 @@ def wrap(func):
         return wrap if fn is None else fn

     _custom_op = custom_op_no_op
     _register_fake = register_fake_no_op


 logger = get_logger(__name__)  # pylint: disable=invalid-name
```
@@ -153,6 +164,8 @@ class AttentionBackendName(str, Enum): | |
FLASH_VARLEN = "flash_varlen" | ||
_FLASH_3 = "_flash_3" | ||
_FLASH_VARLEN_3 = "_flash_varlen_3" | ||
_FLASH_3_HUB = "_flash_3_hub" | ||
# _FLASH_VARLEN_3_HUB = "_flash_varlen_3_hub" # not supported yet. | ||
|
||
# PyTorch native | ||
FLEX = "flex" | ||
|
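Once registered, the backend is selected by its string value like any other entry in the enum. A hedged usage sketch; `set_attention_backend` is assumed to be the existing helper on diffusers models, and the checkpoint name below is a placeholder:

```python
# Hedged sketch: routing a model's attention through the new Hub FA3 backend.
# Assumes DIFFUSERS_ENABLE_HUB_KERNELS was set before import (see above) and
# that the loaded model exposes the set_attention_backend helper.
import torch
from diffusers import DiffusionPipeline

pipe = DiffusionPipeline.from_pretrained("some/placeholder-checkpoint", torch_dtype=torch.bfloat16).to("cuda")
pipe.transformer.set_attention_backend("_flash_3_hub")  # value of AttentionBackendName._FLASH_3_HUB
image = pipe("a prompt").images[0]
```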
@@ -351,6 +364,13 @@ def _check_attention_backend_requirements(backend: AttentionBackendName) -> None | |
f"Flash Attention 3 backend '{backend.value}' is not usable because of missing package or the version is too old. Please build FA3 beta release from source." | ||
) | ||
|
||
# TODO: add support Hub variant of FA3 varlen later | ||
elif backend in [AttentionBackendName._FLASH_3_HUB]: | ||
if not is_kernels_available(): | ||
raise RuntimeError( | ||
f"Flash Attention 3 Hub backend '{backend.value}' is not usable because the `kernels` package isn't available. Please install it with `pip install kernels`." | ||
) | ||
|
||
elif backend in [ | ||
AttentionBackendName.SAGE, | ||
AttentionBackendName.SAGE_VARLEN, | ||
|
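The check surfaces a clear error when the opt-in backend is requested without its dependency. An illustrative sketch, reusing the `pipe` from above and assuming backend selection routes through this requirements check:

```python
# Hedged illustration: in an environment without `kernels`, requesting the
# Hub backend should fail fast with the RuntimeError added above.
try:
    pipe.transformer.set_attention_backend("_flash_3_hub")
except RuntimeError as err:
    print(err)  # "...not usable because the `kernels` package isn't available..."
```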
```diff
@@ -657,6 +677,42 @@ def _flash_attention_3(
     return (out, lse) if return_attn_probs else out


+@_AttentionBackendRegistry.register(
+    AttentionBackendName._FLASH_3_HUB,
+    constraints=[_check_device, _check_qkv_dtype_bf16_or_fp16, _check_shape],
+)
+def _flash_attention_3_hub(
+    query: torch.Tensor,
+    key: torch.Tensor,
+    value: torch.Tensor,
+    scale: Optional[float] = None,
+    is_causal: bool = False,
+    window_size: Tuple[int, int] = (-1, -1),
+    softcap: float = 0.0,
+    deterministic: bool = False,
+    return_attn_probs: bool = False,
+) -> torch.Tensor:
+    out = flash_attn_3_func_hub(
+        q=query,
+        k=key,
+        v=value,
+        softmax_scale=scale,
+        causal=is_causal,
+        qv=None,
+        q_descale=None,
+        k_descale=None,
+        v_descale=None,
+        window_size=window_size,
+        softcap=softcap,
+        num_splits=1,
+        pack_gqa=None,
+        deterministic=deterministic,
+        sm_margin=0,
+    )
+    lse = None
+    return (out, lse) if return_attn_probs else out


 @_AttentionBackendRegistry.register(
     AttentionBackendName._FLASH_VARLEN_3,
     constraints=[_check_device, _check_qkv_dtype_bf16_or_fp16, _check_shape],
```
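Compared with `_flash_attention_3`, the Hub variant pins the FA3-specific arguments (`qv`, the descales, `num_splits`, `pack_gqa`, `sm_margin`) to defaults and never returns the log-sum-exp. A minimal smoke-test sketch of the underlying Hub function, assuming supported CUDA hardware, an installed `kernels` package, and placeholder tensor shapes:

```python
# Hedged smoke test of the Hub-loaded FA3 kernel, mirroring the call above.
# Assumes a GPU supported by kernels-community/flash-attn3.
import torch
from kernels import get_kernel

fa3 = get_kernel("kernels-community/flash-attn3")  # the PR temporarily pins revision="fake-ops"
q = torch.randn(1, 1024, 8, 128, device="cuda", dtype=torch.bfloat16)  # (batch, seq, heads, head_dim)
k = torch.randn(1, 1024, 8, 128, device="cuda", dtype=torch.bfloat16)
v = torch.randn(1, 1024, 8, 128, device="cuda", dtype=torch.bfloat16)
out = fa3.flash_attn_func(q=q, k=k, v=v, causal=False)  # output has the same shape as q
```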
File: src/diffusers/utils/kernels_utils.py (new file)
```diff
@@ -0,0 +1,23 @@
+from ..utils import get_logger
+from .import_utils import is_kernels_available
+
+
+logger = get_logger(__name__)
+
+
+_DEFAULT_HUB_ID_FA3 = "kernels-community/flash-attn3"
+
+
+def _get_fa3_from_hub():
+    if not is_kernels_available():
+        return None
+    else:
+        from kernels import get_kernel
+
+        try:
+            # TODO: temporary revision for now. Remove when merged upstream into `main`.
+            flash_attn_3_hub = get_kernel(_DEFAULT_HUB_ID_FA3, revision="fake-ops")
+            return flash_attn_3_hub
+        except Exception as e:
+            logger.error(f"An error occurred while fetching kernel '{_DEFAULT_HUB_ID_FA3}' from the Hub: {e}")
+            raise
```
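A note on the helper: `get_kernel` resolves the repo to a build matching the local torch/CUDA setup and caches the download, and `_get_fa3_from_hub` degrades to `None` rather than raising when `kernels` is absent (the hard failure happens earlier, in the dispatcher's import-time check). A hedged sketch of exercising it directly:

```python
# Hedged sketch: calling the new helper in isolation.
from diffusers.utils.kernels_utils import _get_fa3_from_hub

fa3_interface = _get_fa3_from_hub()  # None when `kernels` is not installed
if fa3_interface is not None:
    flash_attn_func = fa3_interface.flash_attn_func  # the attribute the dispatcher binds to flash_attn_3_func_hub
```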
Review comment: "Made it a constant in constants.py, as I think it will be shared across modules."