import contextlib
import warnings
from functools import wraps
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional

import numpy
import torch
from transformers import AutoConfig

if TYPE_CHECKING:
    from compressed_tensors.compressors import ModelCompressor


__all__ = [
    "infer_compressor_from_model_config",
    "fix_fsdp_module_name",
    "tensor_follows_mask_structure",
    "replace_module",
    "is_compressed_tensors_config",
    "getattr_chain",
    "deprecated",
    "Aliasable",
    "combine_shards",
    "shard_tensor",
    "pack_bitmasks",
    "unpack_bitmasks",
    "patch_attr",
    "ParameterizedDefaultDict",
]

FSDP_WRAPPER_NAME = "_fsdp_wrapped_module"

def infer_compressor_from_model_config(
    pretrained_model_name_or_path: str,
) -> Optional["ModelCompressor"]:
    """
    Given a path to a model config, extract a sparsity config if it exists and return
    the associated ModelCompressor

    :param pretrained_model_name_or_path: path to model config on disk or HF hub
    :return: matching compressor if config contains a sparsity config
    """
    from compressed_tensors.compressors import ModelCompressor
    from compressed_tensors.config import CompressionConfig

    config = AutoConfig.from_pretrained(pretrained_model_name_or_path)
    sparsity_config = ModelCompressor.parse_sparsity_config(config)
    if sparsity_config is None:
        return None

    format = sparsity_config.get("format")
    sparsity_config = CompressionConfig.load_from_registry(format, **sparsity_config)
    compressor = ModelCompressor.load_from_registry(format, config=sparsity_config)
    return compressor


def fix_fsdp_module_name(name: str) -> str:
    """
    Remove FSDP wrapper prefixes from a module name
    Accounts for scenario where FSDP_WRAPPER_NAME is
    at the end of the name, as well as in the middle.
    :param name: name to strip
    :return: stripped name
    """
    return name.replace(FSDP_WRAPPER_NAME + ".", "").replace(
        "." + FSDP_WRAPPER_NAME, ""
    )


def tensor_follows_mask_structure(tensor, mask: str = "2:4") -> bool:
    """
    :param tensor: tensor to check
    :param mask: mask structure to check for, in the format "n:m"
    :return: True if the tensor follows the mask structure, False otherwise.
        Note, some weights can incidentally be zero, so we check for
        atleast n zeros in each chunk of size m
    """
    n, m = tuple(map(int, mask.split(":")))

    # view the tensor as chunks of size m and count the zeros in each chunk
    tensor = tensor.view(-1, m)
    zero_counts = (tensor == 0).sum(dim=1)

    # each chunk must contain at least n zeros (more are allowed, since weights
    # can incidentally be zero)
    if not torch.all(zero_counts >= n).item():
        raise ValueError()

    return True


def replace_module(model: torch.nn.Module, name: str, new_module: torch.nn.Module):
    # replace the named submodule by assigning new_module onto its parent module
    if "." in name:
        parent_name = name.rsplit(".", 1)[0]
        child_name = name[len(parent_name) + 1 :]
        parent = model.get_submodule(parent_name)
    else:
        parent_name = ""
        parent = model
        child_name = name

    setattr(parent, child_name, new_module)


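# Illustrative sketch of the structure check (assumed example values, not part of the
# library API): with the default "2:4" mask, every chunk of four values must contain at
# least two zeros, otherwise a ValueError is raised.
#   >>> pruned = torch.tensor([[0.0, 0.3, 0.0, 1.2], [0.0, 0.0, 0.7, 0.0]])
#   >>> tensor_follows_mask_structure(pruned)
#   True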
def is_compressed_tensors_config(compression_config: Any) -> bool:
    """
    Returns True if CompressedTensorsConfig is available from transformers and
    compression_config is an instance of CompressedTensorsConfig

    See: https://github.com/huggingface/transformers/pull/31704
    """
    try:
        from transformers.utils.quantization_config import CompressedTensorsConfig

        return isinstance(compression_config, CompressedTensorsConfig)
    except ImportError:
        return False


def getattr_chain(obj: Any, chain_str: str, *args, **kwargs) -> Any:
    """
    Chain multiple getattr calls, separated by `.`

    :param obj: base object whose attributes are being retrieved
    :param chain_str: attribute names separated by `.`
    :param default: default value, throw error otherwise
    """
    if len(args) >= 1:
        has_default = True
        default = args[0]
    elif "default" in kwargs:
        has_default = True
        default = kwargs["default"]
    else:
        has_default = False

    attr_names = chain_str.split(".")

    res = obj
    for attr_name in attr_names:
        if not hasattr(res, attr_name):
            if has_default:
                return default
            else:
                raise AttributeError(f"{res} object has no attribute {attr_name}")
        res = getattr(res, attr_name)

    return res


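# Illustrative usage sketch (hypothetical attribute names, not part of the library API):
#   >>> from types import SimpleNamespace
#   >>> cfg = SimpleNamespace(quantization=SimpleNamespace(num_bits=8))
#   >>> getattr_chain(cfg, "quantization.num_bits")
#   8
#   >>> getattr_chain(cfg, "quantization.group_size", None) is None
#   True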
def deprecated(future_name: Optional[str] = None, message: Optional[str] = None):
    """
    Decorator to mark functions as deprecated

    :param future_name: Function called in place of deprecated function
    :param message: Deprecation message, replaces default deprecation message
    """

    def decorator(func: Callable[[Any], Any]):
        nonlocal message

        if message is None:
            message = (
                f"{func.__name__} is deprecated and will be removed in a future release"
            )
            if future_name is not None:
                message += f". Please use {future_name} instead."

        @wraps(func)
        def wrapped(*args, **kwargs):
            warnings.warn(message, DeprecationWarning, stacklevel=2)
            return func(*args, **kwargs)

        return wrapped

    return decorator


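# Illustrative usage sketch (hypothetical function names, not part of the library API):
#   >>> @deprecated(future_name="new_helper")
#   ... def old_helper():
#   ...     return 1
#   >>> old_helper()  # emits a DeprecationWarning pointing callers at new_helper
#   1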
class Aliasable:
    """
    A mixin for enums to allow aliasing of enum members

    Example:
    >>> class MyClass(Aliasable, int, Enum):
    >>>     ...
    """

    @staticmethod
    def get_aliases() -> Dict[str, str]:
        raise NotImplementedError()

    def __eq__(self, other):
        if isinstance(other, self.__class__):
            aliases = self.get_aliases()
            return self.value == other.value or (
                aliases.get(self.value, self.value)
                == aliases.get(other.value, other.value)
            )
        else:
            aliases = self.get_aliases()
            self_value = aliases.get(self.value, self.value)
            other_value = aliases.get(other, other)
            return self_value == other_value

    def __hash__(self):
        canonical_value = self.aliases.get(self.value, self.value)
        return hash(canonical_value)


def shard_tensor(
    tensor: torch.Tensor, shard_sizes: List[int], dim: int = 0
) -> List[torch.Tensor]:
    """
    Shards a tensor into a list of tensors along a given dimension.

    raises: ValueError: If the sum of shard_sizes does not match the
        size of the tensor along the given dimension.

    :param tensor: The input tensor to shard.
    :param shard_sizes : List of sizes for each shard along the specified dimension.
    :param dim : The dimension along which to shard the tensor.
    :returns: A list of tensors sharded along the specified dimension.
    """
    if sum(shard_sizes) != tensor.size(dim):
        raise ValueError(
            "Sum of shard_sizes must equal the size of the tensor "
            "along the specified dimension."
        )

    shards = []
    start_idx = 0

    for size in shard_sizes:
        end_idx = start_idx + size
        shard = tensor.narrow(dim, start_idx, size)
        shards.append(shard)
        start_idx = end_idx

    return shards


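# Illustrative sketch (assumed sizes, not part of the library API): splitting a tensor
# with 6 rows into shards of 2 and 4 rows along dim 0.
#   >>> full = torch.arange(12).reshape(6, 2)
#   >>> [s.shape for s in shard_tensor(full, [2, 4], dim=0)]
#   [torch.Size([2, 2]), torch.Size([4, 2])]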
def combine_shards(shards, dim=0):
    """
    Combine decompressed shards along a given dimension using `narrow`.

    :param shards: List of decompressed shard tensors.
    :param dim: Dimension to combine along (default: 0).
    :return: Combined decompressed tensor.
    """
    if not shards:
        raise ValueError("The list of shards is empty.")

    # all shards must share the same dtype
    shard_dtypes = {shard.dtype for shard in shards}
    if len(shard_dtypes) > 1:
        raise ValueError("All shards must have the same dtype.")

    # determine the shape of the combined tensor
    total_shape = list(shards[0].shape)
    total_shape[dim] = sum(shard.shape[dim] for shard in shards)

    # fill the combined tensor shard by shard using narrow + copy_
    combined = torch.zeros(total_shape, dtype=shards[0].dtype, device=shards[0].device)
    shard_offset = 0
    for shard in shards:
        shard_size = shard.shape[dim]
        combined.narrow(dim, shard_offset, shard_size).copy_(shard)
        shard_offset += shard_size

    return combined


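# Illustrative round-trip sketch (assumed shapes, not part of the library API):
# sharding and then combining along the same dimension recovers the original tensor.
#   >>> full = torch.arange(12).reshape(6, 2)
#   >>> shards = shard_tensor(full, [2, 4], dim=0)
#   >>> torch.equal(combine_shards(shards, dim=0), full)
#   True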
def pack_bitmasks(bytemasks: torch.Tensor) -> torch.Tensor:
    """
    Converts a bytemask tensor to a bitmask tensor to reduce memory. Shape RxC will be
    compressed to R x ceil(C/8)

    :param bytemasks: mask tensor where each byte corresponds to a weight
    :return: mask tensor where each bit corresponds to a weight
    """
    packed_bits_numpy = numpy.packbits(bytemasks.numpy(), axis=-1, bitorder="little")
    packed_bits_torch = torch.from_numpy(packed_bits_numpy)

    return packed_bits_torch


def unpack_bitmasks(
    packed_bitmasks: torch.Tensor, original_shape: List[int]
) -> torch.Tensor:
    """
    Converts a bitmask tensor back to a bytemask tensor for use during decompression

    :param packed_bitmasks: mask tensor where each bit corresponds to a weight
    :param original_shape: dense shape to decompress to
    :return: boolean mask of weights in the original dense shape
    """
    # unpack the bits, keeping only the first original_shape[-1] columns per row
    unpacked_bits = numpy.unpackbits(
        packed_bitmasks.cpu().numpy(),
        axis=-1,
        count=original_shape[-1],
        bitorder="little",
    )
    unpacked_bitmasks_torch = torch.from_numpy(
        unpacked_bits.reshape(original_shape).astype(bool)
    )

    return unpacked_bitmasks_torch


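# Illustrative round-trip sketch (assumed shapes, not part of the library API): a 2x8
# boolean bytemask packs into a 2x1 uint8 bitmask and unpacks back to the same mask.
#   >>> mask = torch.tensor([[True, False] * 4, [False, True] * 4])
#   >>> packed = pack_bitmasks(mask)
#   >>> packed.shape
#   torch.Size([2, 1])
#   >>> torch.equal(unpack_bitmasks(packed, [2, 8]), mask)
#   True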
@contextlib.contextmanager
def patch_attr(base: object, attr: str, value: Any):
    """
    Patch the value of an object attribute. Original value is restored upon exit

    :param base: object which has the attribute to patch
    :param attr: name of the attribute to patch
    :param value: used to replace original value

    Usage:
    >>> from types import SimpleNamespace
    >>> obj = SimpleNamespace()
    >>> with patch_attr(obj, "attribute", "value"):
    ...     assert obj.attribute == "value"
    >>> assert not hasattr(obj, "attribute")
    """
    _sentinel = object()
    original_value = getattr(base, attr, _sentinel)

    setattr(base, attr, value)
    try:
        yield
    finally:
        if original_value is not _sentinel:
            setattr(base, attr, original_value)
        else:
            delattr(base, attr)


class ParameterizedDefaultDict(dict):
    """
    Similar to `collections.DefaultDict`, but upon fetching a key which is missing,
    the key is passed as arguments to the `default_factory`

    :param default_factory: function which takes a key as input and returns the
        corresponding default value
    """

    def __init__(self, default_factory: Callable[[Any], Any]):
        self.default_factory = default_factory

    def __missing__(self, key):
        if isinstance(key, tuple):
            value = self.default_factory(*key)
        else:
            value = self.default_factory(key)
        self[key] = value

        return value

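# Illustrative usage sketch (hypothetical factories, not part of the library API): the
# missing key, or each element of a missing key tuple, is forwarded to the factory.
#   >>> squares = ParameterizedDefaultDict(lambda k: k * k)
#   >>> squares[3]
#   9
#   >>> products = ParameterizedDefaultDict(lambda a, b: a * b)
#   >>> products[(2, 5)]
#   10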