"""Registry for flash attention implementations.

This module contains the registration system for flash attention implementations.
It has no torch dependencies to avoid circular imports during initialization.
"""

import logging
from collections.abc import Callable
from typing import Literal, Protocol


logger = logging.getLogger(__name__)


class FlashAttentionHandle(Protocol):
    # Structural interface for the handle a backend's register function may
    # return; ``remove()`` undoes the dispatcher registration.
    def remove(self) -> None: ...


# A register function performs the dispatcher registration and may return a
# handle that keeps any required state alive.
_RegisterFn = Callable[..., FlashAttentionHandle | None]
_FlashAttentionImpl = Literal["FA3", "FA4"]

# Implementation name -> callable that activates it.
_FLASH_ATTENTION_IMPLS: dict[str, _RegisterFn] = {}
# ``(impl_name, handle)`` for the active custom impl, or ``None``.
_FLASH_ATTENTION_ACTIVE: tuple[str, FlashAttentionHandle] | None = None
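

# Illustrative sketch, not part of this module's API: because
# ``FlashAttentionHandle`` is a ``Protocol``, any object with a matching
# ``remove()`` method conforms structurally; no inheritance is needed. The
# hypothetical ``_ExampleHandle`` below shows the shape of a handle a backend
# might return from its register function. It is never instantiated here.
class _ExampleHandle:
    def __init__(self, cleanup: Callable[[], None]) -> None:
        # Teardown callback supplied by the backend provider.
        self._cleanup = cleanup

    def remove(self) -> None:
        # Invoked by the registry when the default impl is restored.
        self._cleanup()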


def register_flash_attention_impl(
    impl: _FlashAttentionImpl | str, register_fn: _RegisterFn
) -> None:
    """
    Register the callable that activates a flash attention impl.

    .. note::
        This function is intended for SDPA backend providers to register their
        implementations. End users should use :func:`activate_flash_attention_impl`
        to activate a registered implementation.

    Args:
        impl: Implementation identifier (e.g., ``"FA4"``).
        register_fn: Callable that performs the actual dispatcher registration.
            This function will be invoked by :func:`activate_flash_attention_impl`
            and should register custom kernels with the PyTorch dispatcher.
            It may optionally return a handle implementing
            :class:`FlashAttentionHandle` to keep any necessary state alive.

    Example:
        >>> def my_impl_register(module_path: str = "my_flash_impl"):
        ...     # Register custom kernels with torch dispatcher
        ...     pass  # doctest: +SKIP
        >>> register_flash_attention_impl(
        ...     "MyImpl", register_fn=my_impl_register
        ... )  # doctest: +SKIP
    """
    _FLASH_ATTENTION_IMPLS[impl] = register_fn


def activate_flash_attention_impl(impl: _FlashAttentionImpl | str) -> None:
    """
    Activate into the dispatcher a previously registered flash attention impl.

    .. note::
        Backend providers should NOT automatically activate their implementation
        on import. Users should explicitly opt in by calling this function or via
        environment variables to ensure multiple provider libraries can coexist.

    Args:
        impl: Implementation identifier to activate. See
            :func:`~torch.nn.attention.list_flash_attention_impls` for available
            implementations.
            If the backend's :func:`register_flash_attention_impl` callable
            returns a :class:`FlashAttentionHandle`, the registry keeps that
            handle alive for the lifetime of the process (until explicit
            uninstall support exists).

    Example:
        >>> activate_flash_attention_impl("FA4")  # doctest: +SKIP
    """
    global _FLASH_ATTENTION_ACTIVE
    # Deactivate any previously active custom impl first (no warning if none).
    restore_flash_attention_impl(_raise_warn=False)
    register_fn = _FLASH_ATTENTION_IMPLS.get(impl)
    if register_fn is None:
        raise ValueError(
            f"Unknown flash attention impl '{impl}'. "
            f"Available implementations: {list_flash_attention_impls()}"
        )
    handle = register_fn()
    if handle is not None:
        _FLASH_ATTENTION_ACTIVE = (impl, handle)


def list_flash_attention_impls() -> list[str]:
    """Return the names of all available flash attention implementations."""
    return sorted(_FLASH_ATTENTION_IMPLS.keys())


def current_flash_attention_impl() -> str | None:
    """
    Return the currently activated flash attention impl name, if any.

    ``None`` indicates that no custom impl has been activated.
    """
    if _FLASH_ATTENTION_ACTIVE is not None:
        return _FLASH_ATTENTION_ACTIVE[0]
    return None
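

# Illustrative sketch, not part of this module's API: the opt-in pattern from
# the note on :func:`activate_flash_attention_impl`, gated behind an
# environment variable so that merely importing a provider library never
# changes dispatch. ``TORCH_FLASH_ATTENTION_IMPL`` is a hypothetical variable
# name, not a real torch knob; this function is never called by this module.
def _example_env_gated_activation() -> None:
    import os

    requested = os.environ.get("TORCH_FLASH_ATTENTION_IMPL")
    if requested is not None:
        # Raises ValueError if the requested impl was never registered.
        activate_flash_attention_impl(requested)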


def restore_flash_attention_impl(_raise_warn: bool = True) -> None:
    """
    Restore the default FA2 implementation.
    """
    global _FLASH_ATTENTION_ACTIVE
    handle = None
    if _FLASH_ATTENTION_ACTIVE is not None:
        handle = _FLASH_ATTENTION_ACTIVE[1]
    if handle is not None:
        # Undo the custom registration via the handle the backend returned.
        handle.remove()
        _FLASH_ATTENTION_ACTIVE = None
    elif _raise_warn:
        logger.warning(
            "Trying to restore default FA2 impl when no custom impl was activated"
        )
        _FLASH_ATTENTION_ACTIVE = None