import json
import os
from pathlib import Path
from typing import Dict, List, Optional, Type, TypeVar, Union

import requests

from .constants import CONFIG_NAME, PYTORCH_WEIGHTS_NAME
from .file_download import hf_hub_download, is_torch_available
from .hf_api import HfApi
from .utils import SoftTemporaryDirectory, logging, validate_hf_hub_args


if is_torch_available():
    import torch  # type: ignore

logger = logging.get_logger(__name__)

# Generic type variable bound to `ModelHubMixin` so that `from_pretrained` returns the subclass type.
T = TypeVar("T", bound="ModelHubMixin")


class ModelHubMixin:
    """
    A generic mixin to integrate ANY machine learning framework with the Hub.

    To integrate your framework, your model class must inherit from this class. Custom logic for saving/loading models
    has to be overwritten in [`_from_pretrained`] and [`_save_pretrained`]. [`PyTorchModelHubMixin`] is a good example
    of mixin integration with the Hub. Check out our [integration guide](../guides/integrations) for more instructions.
    """

    def save_pretrained(
        self,
        save_directory: Union[str, Path],
        *,
        config: Optional[dict] = None,
        repo_id: Optional[str] = None,
        push_to_hub: bool = False,
        **kwargs,
    ) -> Optional[str]:
        """
        Save weights in local directory.
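
        Example (a minimal usage sketch; `MyModel` stands for any [`ModelHubMixin`] subclass and the
        config values are illustrative):

        ```python
        >>> model = MyModel()
        >>> # Save weights (and optionally a config) to a local folder
        >>> model.save_pretrained("path/to/my-awesome-model", config={"num_classes": 5})
        >>> # Save locally and push the folder to the Hub in one call
        >>> model.save_pretrained("my-awesome-model", push_to_hub=True, repo_id="username/my-awesome-model")
        ```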

        Args:
            save_directory (`str` or `Path`):
                Path to directory in which the model weights and configuration will be saved.
            config (`dict`, *optional*):
                Model configuration specified as a key/value dictionary.
            push_to_hub (`bool`, *optional*, defaults to `False`):
                Whether or not to push your model to the Huggingface Hub after saving it.
            repo_id (`str`, *optional*):
                ID of your repository on the Hub. Used only if `push_to_hub=True`. Will default to the folder name if
                not provided.
            kwargs:
                Additional keyword arguments passed along to the [`~ModelHubMixin.push_to_hub`] method.
        """
        save_directory = Path(save_directory)
        save_directory.mkdir(parents=True, exist_ok=True)

        # Save model weights/files (framework-specific logic implemented by the subclass)
        self._save_pretrained(save_directory)

        # Save the configuration, if any, as a JSON file next to the weights
        if isinstance(config, dict):
            (save_directory / CONFIG_NAME).write_text(json.dumps(config))

        # Optionally push the whole folder to the Hub
        if push_to_hub:
            kwargs = kwargs.copy()  # soft-copy to avoid mutating input
            if config is not None:  # kwarg for `push_to_hub`
                kwargs["config"] = config
            if repo_id is None:
                repo_id = save_directory.name  # defaults to the folder name
            return self.push_to_hub(repo_id=repo_id, **kwargs)
        return None

    def _save_pretrained(self, save_directory: Path) -> None:
        """
        Overwrite this method in subclass to define how to save your model.
        Check out our [integration guide](../guides/integrations) for instructions.
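
        Example (a minimal sketch of an override; the file name and the `serialize` helper are
        illustrative placeholders for your framework's own serialization logic):

        ```python
        >>> class MyModel(ModelHubMixin):
        ...     def _save_pretrained(self, save_directory: Path) -> None:
        ...         # Write the weights into the target folder with your framework of choice
        ...         (save_directory / "model.bin").write_bytes(self.serialize())
        ```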

        Args:
            save_directory (`str` or `Path`):
                Path to directory in which the model weights and configuration will be saved.
        """
        raise NotImplementedError

    @classmethod
    @validate_hf_hub_args
    def from_pretrained(
        cls: Type[T],
        pretrained_model_name_or_path: Union[str, Path],
        *,
        force_download: bool = False,
        resume_download: bool = False,
        proxies: Optional[Dict] = None,
        token: Optional[Union[str, bool]] = None,
        cache_dir: Optional[Union[str, Path]] = None,
        local_files_only: bool = False,
        revision: Optional[str] = None,
        **model_kwargs,
    ) -> T:
        """
        Download a model from the Huggingface Hub and instantiate it.
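
        Example (a minimal usage sketch; `MyModel` stands for any [`ModelHubMixin`] subclass and the repo
        name and local path are placeholders):

        ```python
        >>> # From a repo on the Hub
        >>> model = MyModel.from_pretrained("username/my-awesome-model")
        >>> # From a local directory containing previously saved weights
        >>> model = MyModel.from_pretrained("path/to/my-awesome-model")
        ```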

        Args:
            pretrained_model_name_or_path (`str`, `Path`):
                - Either the `model_id` (string) of a model hosted on the Hub, e.g. `bigscience/bloom`.
                - Or a path to a `directory` containing model weights saved using
                    [`~transformers.PreTrainedModel.save_pretrained`], e.g., `../path/to/my_model_directory/`.
            revision (`str`, *optional*):
                Revision of the model on the Hub. Can be a branch name, a git tag or any commit id.
                Defaults to the latest commit on `main` branch.
            force_download (`bool`, *optional*, defaults to `False`):
                Whether to force (re-)downloading the model weights and configuration files from the Hub, overriding
                the existing cache.
            resume_download (`bool`, *optional*, defaults to `False`):
                Whether to delete incompletely received files. Will attempt to resume the download if such a file exists.
            proxies (`Dict[str, str]`, *optional*):
                A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',
                'http://hostname': 'foo.bar:4012'}`. The proxies are used on every request.
            token (`str` or `bool`, *optional*):
                The token to use as HTTP bearer authorization for remote files. By default, it will use the token
                cached when running `huggingface-cli login`.
            cache_dir (`str`, `Path`, *optional*):
                Path to the folder where cached files are stored.
            local_files_only (`bool`, *optional*, defaults to `False`):
                If `True`, avoid downloading the file and return the path to the local cached file if it exists.
            model_kwargs (`Dict`, *optional*):
                Additional kwargs to pass to the model during initialization.
        """
        model_id = str(pretrained_model_name_or_path)

        # Look for a config file, either in the local directory or on the Hub
        config_file: Optional[str] = None
        if os.path.isdir(model_id):
            if CONFIG_NAME in os.listdir(model_id):
                config_file = os.path.join(model_id, CONFIG_NAME)
            else:
                logger.warning(f"{CONFIG_NAME} not found in {Path(model_id).resolve()}")
        else:
            try:
                config_file = hf_hub_download(
                    repo_id=model_id,
                    filename=CONFIG_NAME,
                    revision=revision,
                    cache_dir=cache_dir,
                    force_download=force_download,
                    proxies=proxies,
                    resume_download=resume_download,
                    token=token,
                    local_files_only=local_files_only,
                )
            except requests.exceptions.RequestException:
                logger.warning(f"{CONFIG_NAME} not found in HuggingFace Hub.")

        # If a config was found, forward it to the framework-specific loader
        if config_file is not None:
            with open(config_file, "r", encoding="utf-8") as f:
                config = json.load(f)
            model_kwargs.update({"config": config})

        return cls._from_pretrained(
            model_id=model_id,
            revision=revision,
            cache_dir=cache_dir,
            force_download=force_download,
            proxies=proxies,
            resume_download=resume_download,
            local_files_only=local_files_only,
            token=token,
            **model_kwargs,
        )

    @classmethod
    def _from_pretrained(
        cls: Type[T],
        *,
        model_id: str,
        revision: Optional[str],
        cache_dir: Optional[Union[str, Path]],
        force_download: bool,
        proxies: Optional[Dict],
        resume_download: bool,
        local_files_only: bool,
        token: Optional[Union[str, bool]],
        **model_kwargs,
    ) -> T:
        """Overwrite this method in subclass to define how to load your model from pretrained.

        Use [`hf_hub_download`] or [`snapshot_download`] to download files from the Hub before loading them. Most
        args taken as input can be directly passed to those 2 methods. If needed, you can add more arguments to this
        method using "model_kwargs". For example [`PyTorchModelHubMixin._from_pretrained`] takes as input a `map_location`
        parameter to set on which device the model should be loaded.

        Check out our [integration guide](../guides/integrations) for more instructions.
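
        Example (a minimal sketch of an override; the weights file name and the way the model is rebuilt
        are illustrative placeholders, not part of this class):

        ```python
        >>> class MyModel(ModelHubMixin):
        ...     @classmethod
        ...     def _from_pretrained(cls, *, model_id, revision, cache_dir, force_download, proxies,
        ...                          resume_download, local_files_only, token, **model_kwargs):
        ...         weights_path = hf_hub_download(
        ...             repo_id=model_id,
        ...             filename="model.bin",
        ...             revision=revision,
        ...             cache_dir=cache_dir,
        ...             force_download=force_download,
        ...             proxies=proxies,
        ...             resume_download=resume_download,
        ...             token=token,
        ...             local_files_only=local_files_only,
        ...         )
        ...         model = cls(**model_kwargs)
        ...         model.load_weights(weights_path)  # hypothetical framework-specific loader
        ...         return model
        ```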

        Args:
            model_id (`str`):
                ID of the model to load from the Huggingface Hub (e.g. `bigscience/bloom`).
            revision (`str`, *optional*):
                Revision of the model on the Hub. Can be a branch name, a git tag or any commit id. Defaults to the
                latest commit on `main` branch.
            force_download (`bool`, *optional*, defaults to `False`):
                Whether to force (re-)downloading the model weights and configuration files from the Hub, overriding
                the existing cache.
            resume_download (`bool`, *optional*, defaults to `False`):
                Whether to delete incompletely received files. Will attempt to resume the download if such a file exists.
            proxies (`Dict[str, str]`, *optional*):
                A dictionary of proxy servers to use by protocol or endpoint (e.g., `{'http': 'foo.bar:3128',
                'http://hostname': 'foo.bar:4012'}`).
            token (`str` or `bool`, *optional*):
                The token to use as HTTP bearer authorization for remote files. By default, it will use the token
                cached when running `huggingface-cli login`.
            cache_dir (`str`, `Path`, *optional*):
                Path to the folder where cached files are stored.
            local_files_only (`bool`, *optional*, defaults to `False`):
                If `True`, avoid downloading the file and return the path to the local cached file if it exists.
            model_kwargs:
                Additional keyword arguments passed along to the [`~ModelHubMixin._from_pretrained`] method.
        """
        raise NotImplementedError

    @validate_hf_hub_args
    def push_to_hub(
        self,
        repo_id: str,
        *,
        config: Optional[dict] = None,
        commit_message: str = "Push model using huggingface_hub.",
        private: bool = False,
        api_endpoint: Optional[str] = None,
        token: Optional[str] = None,
        branch: Optional[str] = None,
        create_pr: Optional[bool] = None,
        allow_patterns: Optional[Union[List[str], str]] = None,
        ignore_patterns: Optional[Union[List[str], str]] = None,
        delete_patterns: Optional[Union[List[str], str]] = None,
    ) -> str:
        """
        Upload model checkpoint to the Hub.

        Use `allow_patterns` and `ignore_patterns` to precisely filter which files should be pushed to the hub. Use
        `delete_patterns` to delete existing remote files in the same commit. See [`upload_folder`] reference for more
        details.
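
        Example (a minimal usage sketch; `MyModel`, the repo name and the patterns are illustrative):

        ```python
        >>> model = MyModel()
        >>> # Push to a model repo on the Hub, creating it if needed
        >>> model.push_to_hub("username/my-awesome-model", config={"num_classes": 5})
        >>> # Open a pull request instead of committing to `main`, and only upload weight files
        >>> model.push_to_hub("username/my-awesome-model", create_pr=True, allow_patterns=["*.bin"])
        ```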


        Args:
            repo_id (`str`):
                ID of the repository to push to (example: `"username/my-model"`).
            config (`dict`, *optional*):
                Configuration object to be saved alongside the model weights.
            commit_message (`str`, *optional*):
                Message to commit while pushing.
            private (`bool`, *optional*, defaults to `False`):
                Whether the repository created should be private.
            api_endpoint (`str`, *optional*):
                The API endpoint to use when pushing the model to the hub.
            token (`str`, *optional*):
                The token to use as HTTP bearer authorization for remote files. By default, it will use the token
                cached when running `huggingface-cli login`.
            branch (`str`, *optional*):
                The git branch on which to push the model. This defaults to `"main"`.
            create_pr (`boolean`, *optional*):
                Whether or not to create a Pull Request from `branch` with that commit. Defaults to `False`.
            allow_patterns (`List[str]` or `str`, *optional*):
                If provided, only files matching at least one pattern are pushed.
            ignore_patterns (`List[str]` or `str`, *optional*):
                If provided, files matching any of the patterns are not pushed.
            delete_patterns (`List[str]` or `str`, *optional*):
                If provided, remote files matching any of the patterns will be deleted from the repo.

        Returns:
            The url of the commit of your model in the given repository.
        """
        api = HfApi(endpoint=api_endpoint, token=token)
        repo_id = api.create_repo(repo_id=repo_id, private=private, exist_ok=True).repo_id

        # Save the model in a temporary directory and push its content in a single commit
        with SoftTemporaryDirectory() as tmp:
            saved_path = Path(tmp) / repo_id
            self.save_pretrained(saved_path, config=config)
            return api.upload_folder(
                repo_id=repo_id,
                repo_type="model",
                folder_path=saved_path,
                commit_message=commit_message,
                revision=branch,
                create_pr=create_pr,
                allow_patterns=allow_patterns,
                ignore_patterns=ignore_patterns,
                delete_patterns=delete_patterns,
            )


class PyTorchModelHubMixin(ModelHubMixin):
    """
    Implementation of [`ModelHubMixin`] to provide model Hub upload/download capabilities to PyTorch models. The model
    is set in evaluation mode by default using `model.eval()` (dropout modules are deactivated). To train the model,
    you should first set it back in training mode with `model.train()`.

    Example:

    ```python
    >>> import torch
    >>> import torch.nn as nn
    >>> from huggingface_hub import PyTorchModelHubMixin


    >>> class MyModel(nn.Module, PyTorchModelHubMixin):
    ...     def __init__(self):
    ...         super().__init__()
    ...         self.param = nn.Parameter(torch.rand(3, 4))
    ...         self.linear = nn.Linear(4, 5)

    ...     def forward(self, x):
    ...         return self.linear(x + self.param)
    >>> model = MyModel()

    # Save model weights to local directory
    >>> model.save_pretrained("my-awesome-model")

    # Push model weights to the Hub
    >>> model.push_to_hub("my-awesome-model")

    # Download and initialize weights from the Hub
    >>> model = MyModel.from_pretrained("username/my-awesome-model")
    ```
    """

    def _save_pretrained(self, save_directory: Path) -> None:
        """Save weights from a Pytorch model to a local directory."""
        model_to_save = self.module if hasattr(self, "module") else self  # unwrap e.g. nn.DataParallel
        torch.save(model_to_save.state_dict(), save_directory / PYTORCH_WEIGHTS_NAME)

    @classmethod
    def _from_pretrained(
        cls,
        *,
        model_id: str,
        revision: Optional[str],
        cache_dir: Optional[Union[str, Path]],
        force_download: bool,
        proxies: Optional[Dict],
        resume_download: bool,
        local_files_only: bool,
        token: Union[str, bool, None],
        map_location: str = "cpu",
        strict: bool = False,
        **model_kwargs,
    ):
        """Load Pytorch pretrained weights and return the loaded model."""
        if os.path.isdir(model_id):
            print("Loading weights from local directory")
            model_file = os.path.join(model_id, PYTORCH_WEIGHTS_NAME)
        else:
            model_file = hf_hub_download(
                repo_id=model_id,
                filename=PYTORCH_WEIGHTS_NAME,
                revision=revision,
                cache_dir=cache_dir,
                force_download=force_download,
                proxies=proxies,
                resume_download=resume_download,
                token=token,
                local_files_only=local_files_only,
            )
        model = cls(**model_kwargs)
        state_dict = torch.load(model_file, map_location=torch.device(map_location))
        model.load_state_dict(state_dict, strict=strict)
        model.eval()
        return model